2 * Samsung SoC DP (Display Port) interface driver.
4 * Copyright (C) 2012 Samsung Electronics Co., Ltd.
5 * Author: Jingoo Han <jg1.han@samsung.com>
7 * This program is free software; you can redistribute it and/or modify it
8 * under the terms of the GNU General Public License as published by the
9 * Free Software Foundation; either version 2 of the License, or (at your
10 * option) any later version.
14 #include "drm_crtc_helper.h"
16 #include <linux/module.h>
17 #include <linux/platform_device.h>
18 #include <linux/slab.h>
19 #include <linux/err.h>
20 #include <linux/clk.h>
21 #include <linux/gpio.h>
23 #include <linux/interrupt.h>
24 #include <linux/delay.h>
26 #include <linux/workqueue.h>
28 #include <video/exynos_dp.h>
29 #include "exynos_drm_drv.h"
30 #include "exynos_drm_display.h"
32 #ifdef CONFIG_DRM_PTN3460
33 #include "i2c/ptn3460.h"
37 #include <plat/gpio-cfg.h>
39 #include "exynos_dp_core.h"
41 #define PLL_MAX_TRIES 100
/*
 * Bring the DP controller into a known operational state: enable the SW
 * function block, then initialize the analog block, HPD and AUX channel.
 */
43 static int exynos_dp_init_dp(struct exynos_dp_device *dp)
47 	/* SW defined function Normal operation */
48 	exynos_dp_enable_sw_function(dp);
50 	exynos_dp_init_analog_func(dp);
52 	exynos_dp_init_hpd(dp);
53 	exynos_dp_init_aux(dp);
/*
 * Poll hot-plug detect.  When an HPD GPIO is configured the GPIO level is
 * used directly (inverted, so the line appears active-low -- presumably
 * 0 means "plugged"; confirm against board wiring); otherwise the
 * controller's plug-in status register is polled up to
 * DP_TIMEOUT_LOOP_COUNT times.
 */
58 static int exynos_dp_detect_hpd(struct exynos_dp_device *dp)
62 	if (gpio_is_valid(dp->hpd_gpio))
63 		return !gpio_get_value(dp->hpd_gpio);
65 	while (exynos_dp_get_plug_in_status(dp) != 0) {
67 		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
68 			dev_err(dp->dev, "failed to get hpd plug status\n");
77 static unsigned char exynos_dp_calc_edid_check_sum(unsigned char *edid_data)
80 unsigned char sum = 0;
82 for (i = 0; i < EDID_BLOCK_LENGTH; i++)
83 sum = sum + edid_data[i];
/*
 * Read the sink's EDID over the AUX-channel I2C bridge and verify its
 * checksum.  Handles both the single-block case and the case where the
 * extension flag reports one additional 128-byte block.  When the sink's
 * DPCD TEST_REQUEST asks for an EDID-read test, the computed checksum is
 * written back to TEST_EDID_CHECKSUM and acknowledged via TEST_RESPONSE
 * (DP compliance-test handshake).
 */
88 static int exynos_dp_read_edid(struct exynos_dp_device *dp)
90 	unsigned char edid[EDID_BLOCK_LENGTH * 2];
91 	unsigned int extend_block = 0;
93 	unsigned char test_vector;
97 	 * EDID device address is 0x50.
98 	 * However, if necessary, you must have set upper address
99 	 * into E-EDID in I2C device, 0x30.
102 	/* Read Extension Flag, Number of 128-byte EDID extension blocks */
103 	retval = exynos_dp_read_byte_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
109 	if (extend_block > 0) {
110 		dev_dbg(dp->dev, "EDID data includes a single extension!\n");
113 		retval = exynos_dp_read_bytes_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
116 			&edid[EDID_HEADER_PATTERN]);
118 			dev_err(dp->dev, "EDID Read failed!\n");
121 		sum = exynos_dp_calc_edid_check_sum(edid);
123 			dev_err(dp->dev, "EDID bad checksum!\n");
127 		/* Read additional EDID data */
128 		retval = exynos_dp_read_bytes_from_i2c(dp,
129 				I2C_EDID_DEVICE_ADDR,
132 				&edid[EDID_BLOCK_LENGTH]);
134 			dev_err(dp->dev, "EDID Read failed!\n");
137 		sum = exynos_dp_calc_edid_check_sum(&edid[EDID_BLOCK_LENGTH]);
139 			dev_err(dp->dev, "EDID bad checksum!\n");
143 		exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_TEST_REQUEST,
145 		if (test_vector & DPCD_TEST_EDID_READ) {
146 			exynos_dp_write_byte_to_dpcd(dp,
147 				DPCD_ADDR_TEST_EDID_CHECKSUM,
148 				edid[EDID_BLOCK_LENGTH + EDID_CHECKSUM]);
149 			exynos_dp_write_byte_to_dpcd(dp,
150 				DPCD_ADDR_TEST_RESPONSE,
151 				DPCD_TEST_EDID_CHECKSUM_WRITE);
154 		dev_info(dp->dev, "EDID data does not include any extensions.\n");
157 		retval = exynos_dp_read_bytes_from_i2c(dp,
158 				I2C_EDID_DEVICE_ADDR,
161 				&edid[EDID_HEADER_PATTERN]);
163 			dev_err(dp->dev, "EDID Read failed!\n");
166 		sum = exynos_dp_calc_edid_check_sum(edid);
168 			dev_err(dp->dev, "EDID bad checksum!\n");
172 		exynos_dp_read_byte_from_dpcd(dp,
173 			DPCD_ADDR_TEST_REQUEST,
175 		if (test_vector & DPCD_TEST_EDID_READ) {
176 			exynos_dp_write_byte_to_dpcd(dp,
177 				DPCD_ADDR_TEST_EDID_CHECKSUM,
178 				edid[EDID_CHECKSUM]);
179 			exynos_dp_write_byte_to_dpcd(dp,
180 				DPCD_ADDR_TEST_RESPONSE,
181 				DPCD_TEST_EDID_CHECKSUM_WRITE);
	/* NOTE(review): success is logged at error level; dev_dbg would be
	 * more appropriate here -- confirm before changing log semantics. */
185 	dev_err(dp->dev, "EDID Read success!\n");
/*
 * Read the sink's DPCD receiver-capability field (12 bytes starting at
 * DPCD_REV), then attempt the EDID read, retrying up to 3 times.
 */
189 static int exynos_dp_handle_edid(struct exynos_dp_device *dp)
195 	/* Read DPCD DPCD_ADDR_DPCD_REV~RECEIVE_PORT1_CAP_1 */
196 	ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_DPCD_REV, 12, buf);
201 	for (i = 0; i < 3; i++) {
202 		ret = exynos_dp_read_edid(dp);
/*
 * Set or clear the ENHANCED_FRAME_EN bit in the sink's LANE_COUNT_SET
 * DPCD register while preserving the current lane-count bits.
 */
210 static void exynos_dp_enable_rx_to_enhanced_mode(struct exynos_dp_device *dp,
215 	exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET, &data);
218 		exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
219 			DPCD_ENHANCED_FRAME_EN |
220 			DPCD_LANE_COUNT_SET(data));
222 		exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
223 			DPCD_LANE_COUNT_SET(data));
/*
 * Query the sink's MAX_LANE_COUNT DPCD register and return its
 * ENHANCED_FRAME_CAP bit (non-zero when enhanced framing is supported).
 */
226 static int exynos_dp_is_enhanced_mode_available(struct exynos_dp_device *dp)
231 	exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
232 	retval = DPCD_ENHANCED_FRAME_CAP(data);
/*
 * Enable enhanced framing on both sink (DPCD) and source (controller)
 * if -- and only if -- the sink advertises support for it.
 */
237 static void exynos_dp_set_enhanced_mode(struct exynos_dp_device *dp)
241 	data = exynos_dp_is_enhanced_mode_available(dp);
242 	exynos_dp_enable_rx_to_enhanced_mode(dp, data);
243 	exynos_dp_enable_enhanced_mode(dp, data);
/*
 * Stop sending a training pattern: disable it in the TX and tell the
 * sink via DPCD TRAINING_PATTERN_SET.
 */
246 static void exynos_dp_training_pattern_dis(struct exynos_dp_device *dp)
248 	exynos_dp_set_training_pattern(dp, DP_NONE);
250 	exynos_dp_write_byte_to_dpcd(dp,
251 		DPCD_ADDR_TRAINING_PATTERN_SET,
252 		DPCD_TRAINING_PATTERN_DISABLED);
/*
 * Apply a pre-emphasis level to one TX lane by dispatching to the
 * per-lane register helper (lane selection lines not visible here --
 * presumably a switch on @lane).
 */
255 static void exynos_dp_set_lane_lane_pre_emphasis(struct exynos_dp_device *dp,
256 		int pre_emphasis, int lane)
260 		exynos_dp_set_lane0_pre_emphasis(dp, pre_emphasis);
263 		exynos_dp_set_lane1_pre_emphasis(dp, pre_emphasis);
267 		exynos_dp_set_lane2_pre_emphasis(dp, pre_emphasis);
271 		exynos_dp_set_lane3_pre_emphasis(dp, pre_emphasis);
/*
 * Begin software link training (clock-recovery phase): reset the
 * per-lane CR loop counters, program TX link rate/lane count, mirror
 * them into the sink's DPCD, drop pre-emphasis to minimum, wait for PLL
 * lock (bounded by PLL_MAX_TRIES), then start training pattern 1 with
 * scrambling disabled on both ends.
 */
276 static int exynos_dp_link_start(struct exynos_dp_device *dp)
278 	int ret, lane, lane_count, pll_tries;
281 	lane_count = dp->link_train.lane_count;
283 	dp->link_train.lt_state = CLOCK_RECOVERY;
284 	dp->link_train.eq_loop = 0;
286 	for (lane = 0; lane < lane_count; lane++)
287 		dp->link_train.cr_loop[lane] = 0;
289 	/* Set link rate and count as you want to establish*/
290 	exynos_dp_set_link_bandwidth(dp, dp->link_train.link_rate);
291 	exynos_dp_set_lane_count(dp, dp->link_train.lane_count);
293 	/* Setup RX configuration */
294 	buf[0] = dp->link_train.link_rate;
295 	buf[1] = dp->link_train.lane_count;
296 	ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_LINK_BW_SET, 2, buf);
300 	/* Set TX pre-emphasis to minimum */
301 	for (lane = 0; lane < lane_count; lane++)
302 		exynos_dp_set_lane_lane_pre_emphasis(dp,
303 			PRE_EMPHASIS_LEVEL_0, lane);
305 	/* Wait for PLL lock */
307 	while (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
308 		if (pll_tries == PLL_MAX_TRIES)
315 	/* Set training pattern 1 */
316 	exynos_dp_set_training_pattern(dp, TRAINING_PTN1);
318 	/* Set RX training pattern */
319 	ret = exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_TRAINING_PATTERN_SET,
320 		DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_1);
324 	for (lane = 0; lane < lane_count; lane++)
325 		buf[lane] = DPCD_PRE_EMPHASIS_PATTERN2_LEVEL0 |
326 			    DPCD_VOLTAGE_SWING_PATTERN1_LEVEL0;
327 	ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_TRAINING_LANE0_SET,
335 static unsigned char exynos_dp_get_lane_status(u8 link_status[6], int lane)
337 int shift = (lane & 1) * 4;
338 u8 link_value = link_status[lane>>1];
340 return (link_value >> shift) & 0xf;
/*
 * Check whether every active lane has CR_DONE set in @link_status.
 * (Return values for the pass/fail paths are not visible in this view.)
 */
343 static int exynos_dp_clock_recovery_ok(u8 link_status[6], int lane_count)
348 	for (lane = 0; lane < lane_count; lane++) {
349 		lane_status = exynos_dp_get_lane_status(link_status, lane);
350 		if ((lane_status & DPCD_LANE_CR_DONE) == 0)
/*
 * Check channel-equalization success: the inter-lane align bit in
 * LANE_ALIGN_STATUS_UPDATED (link_status[2]) must be set and every
 * active lane must have all DPCD_CHANNEL_EQ_BITS set.
 */
356 static int exynos_dp_channel_eq_ok(u8 link_status[6], int lane_count)
362 	lane_align = link_status[2];
363 	if ((lane_align & DPCD_INTERLANE_ALIGN_DONE) == 0)
366 	for (lane = 0; lane < lane_count; lane++) {
367 		lane_status = exynos_dp_get_lane_status(link_status, lane);
368 		lane_status &= DPCD_CHANNEL_EQ_BITS;
369 		if (lane_status != DPCD_CHANNEL_EQ_BITS)
375 static unsigned char exynos_dp_get_adjust_request_voltage(u8 adjust_request[2],
378 int shift = (lane & 1) * 4;
379 u8 link_value = adjust_request[lane>>1];
381 return (link_value >> shift) & 0x3;
384 static unsigned char exynos_dp_get_adjust_request_pre_emphasis(
385 u8 adjust_request[2],
388 int shift = (lane & 1) * 4;
389 u8 link_value = adjust_request[lane>>1];
391 return ((link_value >> shift) & 0xc) >> 2;
/*
 * Program one TX lane's TRAINING_LANEx_SET value (swing/pre-emphasis)
 * by dispatching to the per-lane helper (lane selection lines not
 * visible here -- presumably a switch on @lane).
 */
394 static void exynos_dp_set_lane_link_training(struct exynos_dp_device *dp,
395 		u8 training_lane_set, int lane)
399 		exynos_dp_set_lane0_link_training(dp, training_lane_set);
402 		exynos_dp_set_lane1_link_training(dp, training_lane_set);
406 		exynos_dp_set_lane2_link_training(dp, training_lane_set);
410 		exynos_dp_set_lane3_link_training(dp, training_lane_set);
/*
 * Read back the TX's current TRAINING_LANEx_SET value for one lane via
 * the per-lane register helpers.
 */
415 static unsigned int exynos_dp_get_lane_link_training(
416 				struct exynos_dp_device *dp,
423 		reg = exynos_dp_get_lane0_link_training(dp);
426 		reg = exynos_dp_get_lane1_link_training(dp);
429 		reg = exynos_dp_get_lane2_link_training(dp);
432 		reg = exynos_dp_get_lane3_link_training(dp);
/*
 * Fallback path when training fails at the current rate: drop from
 * 2.70 Gbps to 1.62 Gbps and restart training; if already at the lowest
 * rate, disable the training pattern, apply enhanced mode if available,
 * and mark the training state machine FAILED.
 */
439 static void exynos_dp_reduce_link_rate(struct exynos_dp_device *dp)
441 	if (dp->link_train.link_rate == LINK_RATE_2_70GBPS) {
442 		/* set to reduced bit rate */
443 		dp->link_train.link_rate = LINK_RATE_1_62GBPS;
444 		dev_err(dp->dev, "set to bandwidth %.2x\n",
445 			dp->link_train.link_rate);
446 		dp->link_train.lt_state = START;
448 		exynos_dp_training_pattern_dis(dp);
449 		/* set enhanced mode if available */
450 		exynos_dp_set_enhanced_mode(dp);
451 		dp->link_train.lt_state = FAILED;
/*
 * Translate the sink's per-lane swing/pre-emphasis adjust requests into
 * TRAINING_LANEx_SET values cached in dp->link_train.training_lane[].
 * When either value is already at level 3 the MAX_*_REACHED flags are
 * set so the sink knows no further increase is possible.
 */
455 static void exynos_dp_get_adjust_train(struct exynos_dp_device *dp,
456 		u8 adjust_request[2])
464 	lane_count = dp->link_train.lane_count;
465 	for (lane = 0; lane < lane_count; lane++) {
466 		voltage_swing = exynos_dp_get_adjust_request_voltage(
467 				adjust_request, lane);
468 		pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
469 				adjust_request, lane);
470 		training_lane = DPCD_VOLTAGE_SWING_SET(voltage_swing) |
471 				DPCD_PRE_EMPHASIS_SET(pre_emphasis);
473 		if (voltage_swing == VOLTAGE_LEVEL_3 ||
474 		    pre_emphasis == PRE_EMPHASIS_LEVEL_3) {
475 			training_lane |= DPCD_MAX_SWING_REACHED;
476 			training_lane |= DPCD_MAX_PRE_EMPHASIS_REACHED;
478 		dp->link_train.training_lane[lane] = training_lane;
/*
 * Decide whether clock recovery should give up: triggers when the swing
 * has hit level 3 or any lane's CR retry counter reached MAX_CR_LOOP.
 * (Return values not visible in this view.)
 */
482 static int exynos_dp_check_max_cr_loop(struct exynos_dp_device *dp,
488 	lane_count = dp->link_train.lane_count;
489 	for (lane = 0; lane < lane_count; lane++) {
490 		if (voltage_swing == VOLTAGE_LEVEL_3 ||
491 			dp->link_train.cr_loop[lane] == MAX_CR_LOOP)
/*
 * One iteration of the clock-recovery training phase.  Reads the six
 * LANE0_1_STATUS.. bytes; if CR is done, advances to training pattern 2
 * (equalizer phase).  Otherwise it counts how often the sink re-requests
 * the same swing/pre-emphasis (cr_loop), falls back to a lower link rate
 * when the retry/swing limit is hit, and re-programs TX lanes and the
 * sink's TRAINING_LANEx_SET registers with the newly requested values.
 */
497 static int exynos_dp_process_clock_recovery(struct exynos_dp_device *dp)
499 	int ret, lane, lane_count;
500 	u8 voltage_swing, pre_emphasis, training_lane, link_status[6];
505 	ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS, 6,
510 	lane_count = dp->link_train.lane_count;
	/* adjust-request bytes are the last two of the 6-byte status read */
514 	adjust_request = link_status + 4;
516 	if (exynos_dp_clock_recovery_ok(link_status, lane_count) == 0) {
517 		/* set training pattern 2 for EQ */
518 		exynos_dp_set_training_pattern(dp, TRAINING_PTN2);
520 		ret = exynos_dp_write_byte_to_dpcd(dp,
521 			DPCD_ADDR_TRAINING_PATTERN_SET,
522 			DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_2);
526 		dp->link_train.lt_state = EQUALIZER_TRAINING;
528 		for (lane = 0; lane < lane_count; lane++) {
529 			training_lane = exynos_dp_get_lane_link_training(
531 			voltage_swing = exynos_dp_get_adjust_request_voltage(
532 					adjust_request, lane);
533 			pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
534 					adjust_request, lane);
			/* same request as last time => count a CR retry */
535 			if ((DPCD_VOLTAGE_SWING_GET(training_lane) == voltage_swing) &&
536 			    (DPCD_PRE_EMPHASIS_GET(training_lane) == pre_emphasis))
537 				dp->link_train.cr_loop[lane]++;
538 			dp->link_train.training_lane[lane] = training_lane;
541 		if (exynos_dp_check_max_cr_loop(dp, voltage_swing) != 0) {
542 			exynos_dp_reduce_link_rate(dp);
547 		exynos_dp_get_adjust_train(dp, adjust_request);
549 		for (lane = 0; lane < lane_count; lane++) {
550 			exynos_dp_set_lane_link_training(dp,
551 				dp->link_train.training_lane[lane], lane);
552 			ret = exynos_dp_write_byte_to_dpcd(dp,
553 				DPCD_ADDR_TRAINING_LANE0_SET + lane,
554 				dp->link_train.training_lane[lane]);
/*
 * One iteration of the equalizer training phase.  If clock recovery has
 * been lost, fall back to a lower link rate.  On EQ success, stop the
 * training pattern, latch the final negotiated rate/lane count from the
 * TX registers into dp->link_train, enable enhanced mode if available,
 * and mark training FINISHED.  Otherwise retry up to MAX_EQ_LOOP times,
 * re-applying the sink's adjust requests each pass.
 */
562 static int exynos_dp_process_equalizer_training(struct exynos_dp_device *dp)
564 	int ret, lane, lane_count;
571 	ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS,
576 	adjust_request = link_status + 4;
577 	lane_count = dp->link_train.lane_count;
579 	if (exynos_dp_clock_recovery_ok(link_status, lane_count)) {
580 		exynos_dp_reduce_link_rate(dp);
583 	if (exynos_dp_channel_eq_ok(link_status, lane_count) == 0) {
584 		/* training pattern set to Normal */
585 		exynos_dp_training_pattern_dis(dp);
587 		dev_info(dp->dev, "Link Training success!\n");
589 		exynos_dp_get_link_bandwidth(dp, &reg);
590 		dp->link_train.link_rate = reg;
591 		dev_dbg(dp->dev, "final bandwidth = %.2x\n",
592 			dp->link_train.link_rate);
594 		exynos_dp_get_lane_count(dp, &reg);
595 		dp->link_train.lane_count = reg;
596 		dev_dbg(dp->dev, "final lane count = %.2x\n",
597 			dp->link_train.lane_count);
598 		/* set enhanced mode if available */
599 		exynos_dp_set_enhanced_mode(dp);
601 		dp->link_train.lt_state = FINISHED;
604 		dp->link_train.eq_loop++;
606 		if (dp->link_train.eq_loop > MAX_EQ_LOOP) {
607 			exynos_dp_reduce_link_rate(dp);
609 		exynos_dp_get_adjust_train(dp, adjust_request);
611 		for (lane = 0; lane < lane_count; lane++) {
612 			exynos_dp_set_lane_link_training(dp,
613 				dp->link_train.training_lane[lane],
615 			ret = exynos_dp_write_byte_to_dpcd(dp,
616 				DPCD_ADDR_TRAINING_LANE0_SET + lane,
617 				dp->link_train.training_lane[lane]);
/* Read the sink's maximum supported link rate from DPCD MAX_LINK_RATE. */
627 static void exynos_dp_get_max_rx_bandwidth(struct exynos_dp_device *dp,
633 	 * For DP rev.1.1, Maximum link rate of Main Link lanes
634 	 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps
636 	exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LINK_RATE, &data);
/* Read the sink's maximum lane count from DPCD MAX_LANE_COUNT. */
640 static void exynos_dp_get_max_rx_lane_count(struct exynos_dp_device *dp,
646 	 * For DP rev.1.1, Maximum number of Main Link lanes
647 	 * 0x01 = 1 lane, 0x02 = 2 lanes, 0x04 = 4 lanes
649 	exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
650 	*lane_count = DPCD_MAX_LANE_COUNT(data);
/*
 * Prepare for link training: reset the macro, read the sink's maximum
 * rate/lane count from DPCD, sanitize implausible values to safe
 * minimums, clamp both to the caller-supplied TX limits, and power up
 * all analog blocks.
 */
653 static void exynos_dp_init_training(struct exynos_dp_device *dp,
654 			enum link_lane_count_type max_lane,
655 			enum link_rate_type max_rate)
658 	 * MACRO_RST must be applied after the PLL_LOCK to avoid
659 	 * the DP inter pair skew issue for at least 10 us
661 	exynos_dp_reset_macro(dp);
663 	/* Initialize by reading RX's DPCD */
664 	exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
665 	exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
667 	if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
668 	    (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
669 		dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
670 			dp->link_train.link_rate);
671 		dp->link_train.link_rate = LINK_RATE_1_62GBPS;
674 	if (dp->link_train.lane_count == 0) {
675 		dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
676 			dp->link_train.lane_count);
677 		dp->link_train.lane_count = (u8)LANE_COUNT1;
680 	/* Setup TX lane count & rate */
681 	if (dp->link_train.lane_count > max_lane)
682 		dp->link_train.lane_count = max_lane;
683 	if (dp->link_train.link_rate > max_rate)
684 		dp->link_train.link_rate = max_rate;
686 	/* All DP analog module power up */
687 	exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
/*
 * Run the software link-training state machine: power down lanes beyond
 * the negotiated count, then loop through START -> CLOCK_RECOVERY ->
 * EQUALIZER_TRAINING until FINISHED (or an error aborts the loop).
 */
690 static int exynos_dp_sw_link_training(struct exynos_dp_device *dp)
692 	int ret = 0, training_finished = 0;
694 	/* Turn off unnecessary lanes */
695 	switch (dp->link_train.lane_count) {
697 		exynos_dp_set_analog_power_down(dp, CH1_BLOCK, 1);
699 		exynos_dp_set_analog_power_down(dp, CH2_BLOCK, 1);
700 		exynos_dp_set_analog_power_down(dp, CH3_BLOCK, 1);
706 	dp->link_train.lt_state = START;
709 	while (!ret && !training_finished) {
710 		switch (dp->link_train.lt_state) {
712 			ret = exynos_dp_link_start(dp);
715 			ret = exynos_dp_process_clock_recovery(dp);
717 		case EQUALIZER_TRAINING:
718 			ret = exynos_dp_process_equalizer_training(dp);
721 			training_finished = 1;
728 		dev_err(dp->dev, "eDP link training failed (%d)\n", ret);
/*
 * Perform hardware-assisted link training: stop video, verify PLL lock,
 * reset the macro, minimize pre-emphasis, power up the analog blocks,
 * read and sanitize the sink's max rate/lane count (same logic as
 * exynos_dp_init_training), program the TX, wake the sink to D0, then
 * hand off to the controller's H/W training engine and record the final
 * negotiated rate and lane count.
 */
733 static int exynos_dp_set_hw_link_train(struct exynos_dp_device *dp,
740 	exynos_dp_stop_video(dp);
742 	if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
743 		dev_err(dp->dev, "PLL is not locked yet.\n");
747 	exynos_dp_reset_macro(dp);
749 	/* Set TX pre-emphasis to minimum */
750 	for (lane = 0; lane < max_lane; lane++)
751 		exynos_dp_set_lane_lane_pre_emphasis(dp,
752 			PRE_EMPHASIS_LEVEL_0, lane);
754 	/* All DP analog module power up */
755 	exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
757 	/* Initialize by reading RX's DPCD */
758 	exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
759 	exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
761 	if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
762 	    (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
763 		dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
764 			dp->link_train.link_rate);
765 		dp->link_train.link_rate = LINK_RATE_1_62GBPS;
768 	if (dp->link_train.lane_count == 0) {
769 		dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
770 			dp->link_train.lane_count);
771 		dp->link_train.lane_count = (u8)LANE_COUNT1;
774 	/* Setup TX lane count & rate */
775 	if (dp->link_train.lane_count > max_lane)
776 		dp->link_train.lane_count = max_lane;
777 	if (dp->link_train.link_rate > max_rate)
778 		dp->link_train.link_rate = max_rate;
	/* NOTE(review): TX is programmed from video_info here even though
	 * link_train.* was just clamped above -- confirm this is intended. */
780 	/* Set link rate and count as you want to establish*/
781 	exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
782 	exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
784 	/* Set sink to D0 (Sink Not Ready) mode. */
785 	exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_SINK_POWER_STATE,
786 		DPCD_SET_POWER_STATE_D0);
788 	/* Enable H/W Link Training */
789 	ret = exynos_dp_enable_hw_link_training(dp);
792 		dev_err(dp->dev, " H/W link training failure: %d\n", ret);
796 	exynos_dp_get_link_bandwidth(dp, &status);
797 	dp->link_train.link_rate = status;
798 	dev_dbg(dp->dev, "final bandwidth = %.2x\n",
799 		dp->link_train.link_rate);
801 	exynos_dp_get_lane_count(dp, &status);
802 	dp->link_train.lane_count = status;
803 	dev_dbg(dp->dev, "final lane count = %.2x\n",
804 		dp->link_train.lane_count);
/*
 * Software-training entry point: repeatedly (up to
 * DP_TIMEOUT_LOOP_COUNT attempts) re-initialize training parameters and
 * run the SW training state machine.
 */
809 static int exynos_dp_set_link_train(struct exynos_dp_device *dp,
816 	for (i = 0; i < DP_TIMEOUT_LOOP_COUNT; i++) {
817 		exynos_dp_init_training(dp, count, bwtype);
818 		retval = exynos_dp_sw_link_training(dp);
/*
 * Configure and start the main video stream: slave mode + color format,
 * verify PLL lock, wait (bounded polling) for the slave video stream
 * clock, set register-calculated M/N, timing-from-capture mode, unmute,
 * disable master mode, then start video and wait for the stream-on flag.
 */
828 static int exynos_dp_config_video(struct exynos_dp_device *dp)
831 	int timeout_loop = 0;
833 	exynos_dp_config_video_slave_mode(dp);
835 	exynos_dp_set_video_color_format(dp);
837 	if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
838 		dev_err(dp->dev, "PLL is not locked yet.\n");
844 		if (!exynos_dp_is_slave_video_stream_clock_on(dp))
846 		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
847 			dev_err(dp->dev, "Wait for stream clock timed out\n");
851 		usleep_range(1000, 5000);
854 	/* Set to use the register calculated M/N video */
855 	exynos_dp_set_video_cr_mn(dp, CALCULATED_M, 0, 0);
857 	/* For video bist, Video timing must be generated by register */
858 	exynos_dp_set_video_timing_mode(dp, VIDEO_TIMING_FROM_CAPTURE);
860 	/* Disable video mute */
861 	exynos_dp_enable_video_mute(dp, 0);
863 	/* Configure video slave mode */
864 	exynos_dp_enable_video_master(dp, 0);
867 	exynos_dp_start_video(dp);
873 		if (!exynos_dp_is_video_stream_on(dp))
876 		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
877 			dev_err(dp->dev, "Wait for video stream timed out\n");
881 		usleep_range(1000, 5000);
885 		dev_err(dp->dev, "Video stream is not detected!\n");
/*
 * Enable or disable data scrambling on both ends of the link: program
 * the TX, then read-modify-write the sink's TRAINING_PATTERN_SET DPCD
 * register to flip its SCRAMBLING_DISABLED bit accordingly.
 */
890 static void exynos_dp_enable_scramble(struct exynos_dp_device *dp, bool enable)
895 		exynos_dp_enable_scrambling(dp);
897 		exynos_dp_read_byte_from_dpcd(dp,
898 			DPCD_ADDR_TRAINING_PATTERN_SET,
900 		exynos_dp_write_byte_to_dpcd(dp,
901 			DPCD_ADDR_TRAINING_PATTERN_SET,
902 			(u8)(data & ~DPCD_SCRAMBLING_DISABLED));
904 		exynos_dp_disable_scrambling(dp);
906 		exynos_dp_read_byte_from_dpcd(dp,
907 			DPCD_ADDR_TRAINING_PATTERN_SET,
909 		exynos_dp_write_byte_to_dpcd(dp,
910 			DPCD_ADDR_TRAINING_PATTERN_SET,
911 			(u8)(data | DPCD_SCRAMBLING_DISABLED));
/*
 * Top-half IRQ handler.  Cable in/out events schedule the hotplug work
 * (deferred drm_helper_hpd_irq_event); HP_CHANGE notifications are
 * acknowledged but deliberately ignored.  In both cases the hotplug
 * interrupt status is cleared here.
 */
915 static irqreturn_t exynos_dp_irq_handler(int irq, void *arg)
917 	struct exynos_dp_device *dp = arg;
918 	enum dp_irq_type irq_type;
920 	irq_type = exynos_dp_get_irq_type(dp);
922 	case DP_IRQ_TYPE_HP_CABLE_IN:
923 	case DP_IRQ_TYPE_HP_CABLE_OUT:
924 		dev_dbg(dp->dev, "Received irq - type=%d\n", irq_type);
925 		schedule_work(&dp->hotplug_work);
926 		exynos_dp_clear_hotplug_interrupts(dp);
928 	case DP_IRQ_TYPE_HP_CHANGE:
930 		 * We get these change notifications once in a while, but there
931 		 * is nothing we can do with them. Just ignore it for now and
932 		 * only handle cable changes.
934 		dev_dbg(dp->dev, "Received irq - hotplug change; ignoring.\n");
935 		exynos_dp_clear_hotplug_interrupts(dp);
938 		dev_err(dp->dev, "Received irq - unknown type!\n");
/*
 * Hotplug workqueue handler: forward the HPD event to the DRM core so
 * connectors get re-probed.
 */
944 static void exynos_dp_hotplug(struct work_struct *work)
946 	struct exynos_dp_device *dp;
948 	dp = container_of(work, struct exynos_dp_device, hotplug_work);
950 	drm_helper_hpd_irq_event(dp->drm_dev);
/*
 * Full link bring-up: optionally wait for the PTN3460 eDP-LVDS bridge,
 * read/validate EDID, run SW or HW link training per dp->training_type,
 * then enable scrambling + enhanced mode, program the final lane
 * count/bandwidth from video_info, and initialize the video block.
 */
953 static void exynos_dp_train_link(struct exynos_dp_device *dp)
957 #ifdef CONFIG_DRM_PTN3460
958 	ret = ptn3460_wait_until_ready(30 * 1000);
960 		DRM_ERROR("PTN3460 is not ready, don't plug\n");
965 	ret = exynos_dp_handle_edid(dp);
967 		dev_err(dp->dev, "unable to handle edid\n");
971 	if (dp->training_type == SW_LINK_TRAINING)
972 		ret = exynos_dp_set_link_train(dp, dp->video_info->lane_count,
973 					dp->video_info->link_rate);
975 		ret = exynos_dp_set_hw_link_train(dp,
976 			dp->video_info->lane_count, dp->video_info->link_rate);
978 		dev_err(dp->dev, "unable to do link train\n");
982 	exynos_dp_enable_scramble(dp, 1);
983 	exynos_dp_enable_rx_to_enhanced_mode(dp, 1);
984 	exynos_dp_enable_enhanced_mode(dp, 1);
986 	exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
987 	exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
989 	exynos_dp_init_video(dp);
/* Panel-ops commit hook: (re)configure and start the video stream. */
992 static void exynos_dp_commit(void *ctx)
994 	struct exynos_dp_device *dp = ctx;
996 	exynos_dp_config_video(dp);
/*
 * Power down the controller: mark disabled, stop HPD, flush any pending
 * hotplug work, run the board's phy_exit callback if provided, and gate
 * the clock.
 */
999 static int exynos_dp_power_off(struct exynos_dp_device *dp)
1004 	dp->enabled = false;
1005 	exynos_dp_disable_hpd(dp);
1007 	if (work_pending(&dp->hotplug_work))
1008 		flush_work_sync(&dp->hotplug_work);
1010 	if (dp->phy_ops.phy_exit)
1011 		dp->phy_ops.phy_exit();
1013 	clk_disable(dp->clock);
/*
 * Power up the controller: phy_init callback, ungate clock, re-init the
 * DP block.  Without a valid IRQ, HPD can never retrigger configuration,
 * so hotplug work is scheduled manually when a sink is detected.  Link
 * training + commit are then run directly to cover the DPMS off->on path
 * where no modeset occurs.
 */
1017 static int exynos_dp_power_on(struct exynos_dp_device *dp)
1022 	if (dp->phy_ops.phy_init)
1023 		dp->phy_ops.phy_init();
1025 	clk_enable(dp->clock);
1027 	exynos_dp_init_dp(dp);
1030 	 * DP controller is reset and needs HPD interrupt to trigger
1031 	 * re-configuration. If we don't have valid IRQ, this is never
1032 	 * going to happen. Let's reconfigure it here in this case.
1034 	if (dp->irq < 0 && !exynos_dp_detect_hpd(dp))
1035 		schedule_work(&dp->hotplug_work);
1038 	 * These calls are required to make sure we train the dp link when dpms
1039 	 * off/on is called from userspace. In the boot and resume cases, the
1040 	 * link training is handled via the modeset, but unfortunately modeset
1041 	 * isn't being called in the dpms off/on case.
1043 	exynos_dp_train_link(dp);
1044 	exynos_dp_commit(dp);
/*
 * Panel-ops DPMS hook: ON powers the device up; STANDBY, SUSPEND and OFF
 * all power it down; anything else is an error.
 */
1050 static int exynos_dp_dpms(void *ctx, int mode)
1052 	struct exynos_dp_device *dp = ctx;
1055 	case DRM_MODE_DPMS_ON:
1056 		return exynos_dp_power_on(dp);
1058 	case DRM_MODE_DPMS_STANDBY:
1059 	case DRM_MODE_DPMS_SUSPEND:
1060 	case DRM_MODE_DPMS_OFF:
1061 		return exynos_dp_power_off(dp);
1064 		DRM_ERROR("Unknown dpms mode %d\n", mode);
/* Panel-ops timing-validation hook -- intentionally a stub (see TODO). */
1069 static int exynos_dp_check_timing(void *ctx, void *timing)
1072 	 * TODO(seanpaul): The datasheet isn't terribly descriptive about the
1073 	 * limitations we have here. It's not vitally important to implement
1074 	 * this right now, but should be implemented once we use EDID to mode
/*
 * Panel-ops connection probe: boards may force "connected" via platform
 * data; otherwise report the inverse of the HPD detect result.
 */
1080 static bool exynos_dp_is_connected(void *ctx)
1082 	struct exynos_dp_device *dp = ctx;
1084 	if (dp->force_connected)
1087 	return !exynos_dp_detect_hpd(dp);
/*
 * DRM sub-driver probe: bind the DRM device, register the (HPD) IRQ
 * handler with the flags chosen at platform probe time, then power the
 * device on.
 */
1090 static int exynos_dp_subdrv_probe(void *ctx, struct drm_device *drm_dev)
1092 	struct exynos_dp_device *dp = ctx;
1095 	dp->drm_dev = drm_dev;
1098 	ret = request_irq(dp->irq, exynos_dp_irq_handler, dp->irq_flags,
1101 		dev_err(dp->dev, "failed to request irq\n");
1106 	exynos_dp_dpms(dp, DRM_MODE_DPMS_ON);
/* Panel-ops mode_set hook: retrain the link for the new mode. */
1111 static void exynos_dp_mode_set(void *ctx, struct drm_display_mode *mode)
1113 	struct exynos_dp_device *dp = ctx;
1115 	exynos_dp_train_link(dp);
/* Panel operations exposed to the Exynos DRM display layer. */
1118 static struct exynos_panel_ops dp_panel_ops = {
1119 	.subdrv_probe = exynos_dp_subdrv_probe,
1120 	.is_connected = exynos_dp_is_connected,
1121 	.check_timing = exynos_dp_check_timing,
1122 	.dpms = exynos_dp_dpms,
1123 	.mode_set = exynos_dp_mode_set,
1124 	.commit = exynos_dp_commit,
/*
 * Platform-device probe: validate platform data, allocate device state,
 * acquire and enable the "dp" clock, map the register region, set up the
 * HPD GPIO (with optional S5P GPIO-interrupt routing) or fall back to
 * the platform IRQ, copy platform configuration, run phy_init if given,
 * init the hotplug work, and attach the panel ops to the display layer.
 * The tail lines shown here are the error-unwind path.
 */
1127 static int __devinit exynos_dp_probe(struct platform_device *pdev)
1129 	struct resource *res;
1130 	struct exynos_dp_device *dp;
1131 	struct exynos_dp_platdata *pdata;
1135 	pdata = pdev->dev.platform_data;
1137 		dev_err(&pdev->dev, "no platform data\n");
1141 	dp = kzalloc(sizeof(struct exynos_dp_device), GFP_KERNEL);
1143 		dev_err(&pdev->dev, "no memory for device data\n");
1147 	dp->dev = &pdev->dev;
1149 	dp->clock = clk_get(&pdev->dev, "dp");
1150 	if (IS_ERR(dp->clock)) {
1151 		dev_err(&pdev->dev, "failed to get clock\n");
1152 		ret = PTR_ERR(dp->clock);
1156 	clk_enable(dp->clock);
1158 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1160 		dev_err(&pdev->dev, "failed to get registers\n");
1165 	res = request_mem_region(res->start, resource_size(res),
1166 				dev_name(&pdev->dev));
1168 		dev_err(&pdev->dev, "failed to request registers region\n");
1175 	dp->reg_base = ioremap(res->start, resource_size(res));
1176 	if (!dp->reg_base) {
1177 		dev_err(&pdev->dev, "failed to ioremap\n");
1179 		goto err_req_region;
1182 	if (gpio_is_valid(pdata->hpd_gpio)) {
1183 		dp->hpd_gpio = pdata->hpd_gpio;
1184 		ret = gpio_request_one(dp->hpd_gpio, GPIOF_IN, "dp_hpd");
1187 #ifdef CONFIG_S5P_GPIO_INT
1188 		ret = s5p_register_gpio_interrupt(dp->hpd_gpio);
1190 			dev_err(&pdev->dev, "cannot register/get GPIO irq\n");
1193 		s3c_gpio_cfgpin(dp->hpd_gpio, S3C_GPIO_SFN(0xf));
1195 		dp->irq = gpio_to_irq(dp->hpd_gpio);
1196 		dp->irq_flags = IRQF_TRIGGER_RISING | IRQF_TRIGGER_FALLING;
1198 		dp->hpd_gpio = -ENODEV;
1199 		dp->irq = platform_get_irq(pdev, 0);
1203 	dp->enabled = false;
1204 	dp->training_type = pdata->training_type;
1205 	dp->video_info = pdata->video_info;
1206 	dp->force_connected = pdata->force_connected;
1207 	if (pdata->phy_init) {
1208 		dp->phy_ops.phy_init = pdata->phy_init;
1209 		dp->phy_ops.phy_init();
1211 	if (pdata->phy_exit)
1212 		dp->phy_ops.phy_exit = pdata->phy_exit;
1214 	INIT_WORK(&dp->hotplug_work, exynos_dp_hotplug);
1216 	platform_set_drvdata(pdev, dp);
1218 	exynos_display_attach_panel(EXYNOS_DRM_DISPLAY_TYPE_FIMD, &dp_panel_ops,
1224 	if (gpio_is_valid(dp->hpd_gpio))
1225 		gpio_free(dp->hpd_gpio);
1227 	iounmap(dp->reg_base);
1229 	release_mem_region(res->start, resource_size(res));
/*
 * Platform-device removal: power off (which also flushes hotplug work),
 * release the HPD GPIO and IRQ, unmap registers, disable the clock, and
 * release the memory region.
 */
1238 static int __devexit exynos_dp_remove(struct platform_device *pdev)
1240 	struct exynos_dp_device *dp = platform_get_drvdata(pdev);
1242 	/* power_off will take care of flushing the hotplug_work */
1243 	exynos_dp_dpms(dp, DRM_MODE_DPMS_OFF);
1245 	if (gpio_is_valid(dp->hpd_gpio))
1246 		gpio_free(dp->hpd_gpio);
1248 	free_irq(dp->irq, dp);
1249 	iounmap(dp->reg_base);
1251 	clk_disable(dp->clock);
	/* NOTE(review): relies on dp->res having been saved at probe time;
	 * the probe path visible here only uses a local `res` -- verify. */
1254 	release_mem_region(dp->res->start, resource_size(dp->res));
1261 struct platform_driver dp_driver = {
1262 .probe = exynos_dp_probe,
1263 .remove = __devexit_p(exynos_dp_remove),
1266 .owner = THIS_MODULE,