/*
 * Samsung SoC DP (Display Port) interface driver.
 *
 * Copyright (C) 2012 Samsung Electronics Co., Ltd.
 * Author: Jingoo Han <jg1.han@samsung.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 */
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/gpio.h>
#include <linux/interrupt.h>
#include <linux/delay.h>
#include <linux/workqueue.h>

#include <video/exynos_dp.h>

#include <plat/gpio-cfg.h>

#include "drm_crtc_helper.h"
#include "exynos_drm_drv.h"
#include "exynos_drm_display.h"
#include "exynos_dp_core.h"
/* Maximum number of PLL lock-status polls before link start gives up. */
#define PLL_MAX_TRIES 100
/*
 * Basic controller bring-up: enable the SW-controlled function block,
 * power the analog macros, and prepare HPD and the AUX channel.
 * NOTE(review): source was truncated here; only the calls visible in the
 * original fragment are retained.
 */
static int exynos_dp_init_dp(struct exynos_dp_device *dp)
{
	/* SW defined function Normal operation */
	exynos_dp_enable_sw_function(dp);

	exynos_dp_init_analog_func(dp);

	exynos_dp_init_hpd(dp);
	exynos_dp_init_aux(dp);

	return 0;
}
54 static int exynos_dp_detect_hpd(struct exynos_dp_device *dp)
58 if (gpio_is_valid(dp->hpd_gpio))
59 return !gpio_get_value(dp->hpd_gpio);
61 while (exynos_dp_get_plug_in_status(dp) != 0) {
63 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
64 dev_err(dp->dev, "failed to get hpd plug status\n");
73 static unsigned char exynos_dp_calc_edid_check_sum(unsigned char *edid_data)
76 unsigned char sum = 0;
78 for (i = 0; i < EDID_BLOCK_LENGTH; i++)
79 sum = sum + edid_data[i];
84 static int exynos_dp_read_edid(struct exynos_dp_device *dp)
86 unsigned char edid[EDID_BLOCK_LENGTH * 2];
87 unsigned int extend_block = 0;
89 unsigned char test_vector;
93 * EDID device address is 0x50.
94 * However, if necessary, you must have set upper address
95 * into E-EDID in I2C device, 0x30.
98 /* Read Extension Flag, Number of 128-byte EDID extension blocks */
99 retval = exynos_dp_read_byte_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
105 if (extend_block > 0) {
106 dev_dbg(dp->dev, "EDID data includes a single extension!\n");
109 retval = exynos_dp_read_bytes_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
112 &edid[EDID_HEADER_PATTERN]);
114 dev_err(dp->dev, "EDID Read failed!\n");
117 sum = exynos_dp_calc_edid_check_sum(edid);
119 dev_err(dp->dev, "EDID bad checksum!\n");
123 /* Read additional EDID data */
124 retval = exynos_dp_read_bytes_from_i2c(dp,
125 I2C_EDID_DEVICE_ADDR,
128 &edid[EDID_BLOCK_LENGTH]);
130 dev_err(dp->dev, "EDID Read failed!\n");
133 sum = exynos_dp_calc_edid_check_sum(&edid[EDID_BLOCK_LENGTH]);
135 dev_err(dp->dev, "EDID bad checksum!\n");
139 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_TEST_REQUEST,
141 if (test_vector & DPCD_TEST_EDID_READ) {
142 exynos_dp_write_byte_to_dpcd(dp,
143 DPCD_ADDR_TEST_EDID_CHECKSUM,
144 edid[EDID_BLOCK_LENGTH + EDID_CHECKSUM]);
145 exynos_dp_write_byte_to_dpcd(dp,
146 DPCD_ADDR_TEST_RESPONSE,
147 DPCD_TEST_EDID_CHECKSUM_WRITE);
150 dev_info(dp->dev, "EDID data does not include any extensions.\n");
153 retval = exynos_dp_read_bytes_from_i2c(dp,
154 I2C_EDID_DEVICE_ADDR,
157 &edid[EDID_HEADER_PATTERN]);
159 dev_err(dp->dev, "EDID Read failed!\n");
162 sum = exynos_dp_calc_edid_check_sum(edid);
164 dev_err(dp->dev, "EDID bad checksum!\n");
168 exynos_dp_read_byte_from_dpcd(dp,
169 DPCD_ADDR_TEST_REQUEST,
171 if (test_vector & DPCD_TEST_EDID_READ) {
172 exynos_dp_write_byte_to_dpcd(dp,
173 DPCD_ADDR_TEST_EDID_CHECKSUM,
174 edid[EDID_CHECKSUM]);
175 exynos_dp_write_byte_to_dpcd(dp,
176 DPCD_ADDR_TEST_RESPONSE,
177 DPCD_TEST_EDID_CHECKSUM_WRITE);
181 dev_err(dp->dev, "EDID Read success!\n");
185 static int exynos_dp_handle_edid(struct exynos_dp_device *dp)
191 /* Read DPCD DPCD_ADDR_DPCD_REV~RECEIVE_PORT1_CAP_1 */
192 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_DPCD_REV, 12, buf);
197 for (i = 0; i < 3; i++) {
198 ret = exynos_dp_read_edid(dp);
206 static void exynos_dp_enable_rx_to_enhanced_mode(struct exynos_dp_device *dp,
211 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET, &data);
214 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
215 DPCD_ENHANCED_FRAME_EN |
216 DPCD_LANE_COUNT_SET(data));
218 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
219 DPCD_LANE_COUNT_SET(data));
222 static int exynos_dp_is_enhanced_mode_available(struct exynos_dp_device *dp)
227 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
228 retval = DPCD_ENHANCED_FRAME_CAP(data);
233 static void exynos_dp_set_enhanced_mode(struct exynos_dp_device *dp)
237 data = exynos_dp_is_enhanced_mode_available(dp);
238 exynos_dp_enable_rx_to_enhanced_mode(dp, data);
239 exynos_dp_enable_enhanced_mode(dp, data);
242 static void exynos_dp_training_pattern_dis(struct exynos_dp_device *dp)
244 exynos_dp_set_training_pattern(dp, DP_NONE);
246 exynos_dp_write_byte_to_dpcd(dp,
247 DPCD_ADDR_TRAINING_PATTERN_SET,
248 DPCD_TRAINING_PATTERN_DISABLED);
/* Program the pre-emphasis level for a single lane (0-3). */
static void exynos_dp_set_lane_lane_pre_emphasis(struct exynos_dp_device *dp,
					int pre_emphasis, int lane)
{
	switch (lane) {
	case 0:
		exynos_dp_set_lane0_pre_emphasis(dp, pre_emphasis);
		break;
	case 1:
		exynos_dp_set_lane1_pre_emphasis(dp, pre_emphasis);
		break;
	case 2:
		exynos_dp_set_lane2_pre_emphasis(dp, pre_emphasis);
		break;
	case 3:
		exynos_dp_set_lane3_pre_emphasis(dp, pre_emphasis);
		break;
	}
}
272 static int exynos_dp_link_start(struct exynos_dp_device *dp)
274 int ret, lane, lane_count, pll_tries;
277 lane_count = dp->link_train.lane_count;
279 dp->link_train.lt_state = CLOCK_RECOVERY;
280 dp->link_train.eq_loop = 0;
282 for (lane = 0; lane < lane_count; lane++)
283 dp->link_train.cr_loop[lane] = 0;
285 /* Set link rate and count as you want to establish*/
286 exynos_dp_set_link_bandwidth(dp, dp->link_train.link_rate);
287 exynos_dp_set_lane_count(dp, dp->link_train.lane_count);
289 /* Setup RX configuration */
290 buf[0] = dp->link_train.link_rate;
291 buf[1] = dp->link_train.lane_count;
292 ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_LINK_BW_SET, 2, buf);
296 /* Set TX pre-emphasis to minimum */
297 for (lane = 0; lane < lane_count; lane++)
298 exynos_dp_set_lane_lane_pre_emphasis(dp,
299 PRE_EMPHASIS_LEVEL_0, lane);
301 /* Wait for PLL lock */
303 while (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
304 if (pll_tries == PLL_MAX_TRIES)
311 /* Set training pattern 1 */
312 exynos_dp_set_training_pattern(dp, TRAINING_PTN1);
314 /* Set RX training pattern */
315 ret = exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_TRAINING_PATTERN_SET,
316 DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_1);
320 for (lane = 0; lane < lane_count; lane++)
321 buf[lane] = DPCD_PRE_EMPHASIS_PATTERN2_LEVEL0 |
322 DPCD_VOLTAGE_SWING_PATTERN1_LEVEL0;
323 ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_TRAINING_LANE0_SET,
331 static unsigned char exynos_dp_get_lane_status(u8 link_status[6], int lane)
333 int shift = (lane & 1) * 4;
334 u8 link_value = link_status[lane>>1];
336 return (link_value >> shift) & 0xf;
339 static int exynos_dp_clock_recovery_ok(u8 link_status[6], int lane_count)
344 for (lane = 0; lane < lane_count; lane++) {
345 lane_status = exynos_dp_get_lane_status(link_status, lane);
346 if ((lane_status & DPCD_LANE_CR_DONE) == 0)
352 static int exynos_dp_channel_eq_ok(u8 link_status[6], int lane_count)
358 lane_align = link_status[2];
359 if ((lane_align & DPCD_INTERLANE_ALIGN_DONE) == 0)
362 for (lane = 0; lane < lane_count; lane++) {
363 lane_status = exynos_dp_get_lane_status(link_status, lane);
364 lane_status &= DPCD_CHANNEL_EQ_BITS;
365 if (lane_status != DPCD_CHANNEL_EQ_BITS)
371 static unsigned char exynos_dp_get_adjust_request_voltage(u8 adjust_request[2],
374 int shift = (lane & 1) * 4;
375 u8 link_value = adjust_request[lane>>1];
377 return (link_value >> shift) & 0x3;
380 static unsigned char exynos_dp_get_adjust_request_pre_emphasis(
381 u8 adjust_request[2],
384 int shift = (lane & 1) * 4;
385 u8 link_value = adjust_request[lane>>1];
387 return ((link_value >> shift) & 0xc) >> 2;
390 static void exynos_dp_set_lane_link_training(struct exynos_dp_device *dp,
391 u8 training_lane_set, int lane)
395 exynos_dp_set_lane0_link_training(dp, training_lane_set);
398 exynos_dp_set_lane1_link_training(dp, training_lane_set);
402 exynos_dp_set_lane2_link_training(dp, training_lane_set);
406 exynos_dp_set_lane3_link_training(dp, training_lane_set);
411 static unsigned int exynos_dp_get_lane_link_training(
412 struct exynos_dp_device *dp,
419 reg = exynos_dp_get_lane0_link_training(dp);
422 reg = exynos_dp_get_lane1_link_training(dp);
425 reg = exynos_dp_get_lane2_link_training(dp);
428 reg = exynos_dp_get_lane3_link_training(dp);
435 static void exynos_dp_reduce_link_rate(struct exynos_dp_device *dp)
437 if (dp->link_train.link_rate == LINK_RATE_2_70GBPS) {
438 /* set to reduced bit rate */
439 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
440 dev_err(dp->dev, "set to bandwidth %.2x\n",
441 dp->link_train.link_rate);
442 dp->link_train.lt_state = START;
444 exynos_dp_training_pattern_dis(dp);
445 /* set enhanced mode if available */
446 exynos_dp_set_enhanced_mode(dp);
447 dp->link_train.lt_state = FAILED;
451 static void exynos_dp_get_adjust_train(struct exynos_dp_device *dp,
452 u8 adjust_request[2])
460 lane_count = dp->link_train.lane_count;
461 for (lane = 0; lane < lane_count; lane++) {
462 voltage_swing = exynos_dp_get_adjust_request_voltage(
463 adjust_request, lane);
464 pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
465 adjust_request, lane);
466 training_lane = DPCD_VOLTAGE_SWING_SET(voltage_swing) |
467 DPCD_PRE_EMPHASIS_SET(pre_emphasis);
469 if (voltage_swing == VOLTAGE_LEVEL_3 ||
470 pre_emphasis == PRE_EMPHASIS_LEVEL_3) {
471 training_lane |= DPCD_MAX_SWING_REACHED;
472 training_lane |= DPCD_MAX_PRE_EMPHASIS_REACHED;
474 dp->link_train.training_lane[lane] = training_lane;
478 static int exynos_dp_check_max_cr_loop(struct exynos_dp_device *dp,
484 lane_count = dp->link_train.lane_count;
485 for (lane = 0; lane < lane_count; lane++) {
486 if (voltage_swing == VOLTAGE_LEVEL_3 ||
487 dp->link_train.cr_loop[lane] == MAX_CR_LOOP)
493 static int exynos_dp_process_clock_recovery(struct exynos_dp_device *dp)
495 int ret, lane, lane_count;
496 u8 voltage_swing, pre_emphasis, training_lane, link_status[6];
501 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS, 6,
506 lane_count = dp->link_train.lane_count;
510 adjust_request = link_status + 4;
512 if (exynos_dp_clock_recovery_ok(link_status, lane_count) == 0) {
513 /* set training pattern 2 for EQ */
514 exynos_dp_set_training_pattern(dp, TRAINING_PTN2);
516 ret = exynos_dp_write_byte_to_dpcd(dp,
517 DPCD_ADDR_TRAINING_PATTERN_SET,
518 DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_2);
522 dp->link_train.lt_state = EQUALIZER_TRAINING;
524 for (lane = 0; lane < lane_count; lane++) {
525 training_lane = exynos_dp_get_lane_link_training(
527 voltage_swing = exynos_dp_get_adjust_request_voltage(
528 adjust_request, lane);
529 pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
530 adjust_request, lane);
531 if ((DPCD_VOLTAGE_SWING_GET(training_lane) == voltage_swing) &&
532 (DPCD_PRE_EMPHASIS_GET(training_lane) == pre_emphasis))
533 dp->link_train.cr_loop[lane]++;
534 dp->link_train.training_lane[lane] = training_lane;
537 if (exynos_dp_check_max_cr_loop(dp, voltage_swing) != 0) {
538 exynos_dp_reduce_link_rate(dp);
543 exynos_dp_get_adjust_train(dp, adjust_request);
545 for (lane = 0; lane < lane_count; lane++) {
546 exynos_dp_set_lane_link_training(dp,
547 dp->link_train.training_lane[lane], lane);
548 ret = exynos_dp_write_byte_to_dpcd(dp,
549 DPCD_ADDR_TRAINING_LANE0_SET + lane,
550 dp->link_train.training_lane[lane]);
558 static int exynos_dp_process_equalizer_training(struct exynos_dp_device *dp)
560 int ret, lane, lane_count;
567 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS,
572 adjust_request = link_status + 4;
573 lane_count = dp->link_train.lane_count;
575 if (exynos_dp_clock_recovery_ok(link_status, lane_count)) {
576 exynos_dp_reduce_link_rate(dp);
579 if (exynos_dp_channel_eq_ok(link_status, lane_count) == 0) {
580 /* traing pattern Set to Normal */
581 exynos_dp_training_pattern_dis(dp);
583 dev_info(dp->dev, "Link Training success!\n");
585 exynos_dp_get_link_bandwidth(dp, ®);
586 dp->link_train.link_rate = reg;
587 dev_dbg(dp->dev, "final bandwidth = %.2x\n",
588 dp->link_train.link_rate);
590 exynos_dp_get_lane_count(dp, ®);
591 dp->link_train.lane_count = reg;
592 dev_dbg(dp->dev, "final lane count = %.2x\n",
593 dp->link_train.lane_count);
594 /* set enhanced mode if available */
595 exynos_dp_set_enhanced_mode(dp);
597 dp->link_train.lt_state = FINISHED;
600 dp->link_train.eq_loop++;
602 if (dp->link_train.eq_loop > MAX_EQ_LOOP) {
603 exynos_dp_reduce_link_rate(dp);
605 exynos_dp_get_adjust_train(dp, adjust_request);
607 for (lane = 0; lane < lane_count; lane++) {
608 exynos_dp_set_lane_link_training(dp,
609 dp->link_train.training_lane[lane],
611 ret = exynos_dp_write_byte_to_dpcd(dp,
612 DPCD_ADDR_TRAINING_LANE0_SET + lane,
613 dp->link_train.training_lane[lane]);
623 static void exynos_dp_get_max_rx_bandwidth(struct exynos_dp_device *dp,
629 * For DP rev.1.1, Maximum link rate of Main Link lanes
630 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps
632 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LINK_RATE, &data);
636 static void exynos_dp_get_max_rx_lane_count(struct exynos_dp_device *dp,
642 * For DP rev.1.1, Maximum number of Main Link lanes
643 * 0x01 = 1 lane, 0x02 = 2 lanes, 0x04 = 4 lanes
645 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
646 *lane_count = DPCD_MAX_LANE_COUNT(data);
649 static void exynos_dp_init_training(struct exynos_dp_device *dp,
650 enum link_lane_count_type max_lane,
651 enum link_rate_type max_rate)
654 * MACRO_RST must be applied after the PLL_LOCK to avoid
655 * the DP inter pair skew issue for at least 10 us
657 exynos_dp_reset_macro(dp);
659 /* Initialize by reading RX's DPCD */
660 exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
661 exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
663 if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
664 (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
665 dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
666 dp->link_train.link_rate);
667 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
670 if (dp->link_train.lane_count == 0) {
671 dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
672 dp->link_train.lane_count);
673 dp->link_train.lane_count = (u8)LANE_COUNT1;
676 /* Setup TX lane count & rate */
677 if (dp->link_train.lane_count > max_lane)
678 dp->link_train.lane_count = max_lane;
679 if (dp->link_train.link_rate > max_rate)
680 dp->link_train.link_rate = max_rate;
682 /* All DP analog module power up */
683 exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
686 static int exynos_dp_sw_link_training(struct exynos_dp_device *dp)
688 int ret = 0, training_finished = 0;
690 /* Turn off unnecessary lanes */
691 switch (dp->link_train.lane_count) {
693 exynos_dp_set_analog_power_down(dp, CH1_BLOCK, 1);
695 exynos_dp_set_analog_power_down(dp, CH2_BLOCK, 1);
696 exynos_dp_set_analog_power_down(dp, CH3_BLOCK, 1);
702 dp->link_train.lt_state = START;
705 while (!ret && !training_finished) {
706 switch (dp->link_train.lt_state) {
708 ret = exynos_dp_link_start(dp);
711 ret = exynos_dp_process_clock_recovery(dp);
713 case EQUALIZER_TRAINING:
714 ret = exynos_dp_process_equalizer_training(dp);
717 training_finished = 1;
724 dev_err(dp->dev, "eDP link training failed (%d)\n", ret);
729 static int exynos_dp_set_hw_link_train(struct exynos_dp_device *dp,
736 exynos_dp_stop_video(dp);
738 if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
739 dev_err(dp->dev, "PLL is not locked yet.\n");
743 exynos_dp_reset_macro(dp);
745 /* Set TX pre-emphasis to minimum */
746 for (lane = 0; lane < max_lane; lane++)
747 exynos_dp_set_lane_lane_pre_emphasis(dp,
748 PRE_EMPHASIS_LEVEL_0, lane);
750 /* All DP analog module power up */
751 exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
753 /* Initialize by reading RX's DPCD */
754 exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
755 exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
757 if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
758 (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
759 dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
760 dp->link_train.link_rate);
761 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
764 if (dp->link_train.lane_count == 0) {
765 dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
766 dp->link_train.lane_count);
767 dp->link_train.lane_count = (u8)LANE_COUNT1;
770 /* Setup TX lane count & rate */
771 if (dp->link_train.lane_count > max_lane)
772 dp->link_train.lane_count = max_lane;
773 if (dp->link_train.link_rate > max_rate)
774 dp->link_train.link_rate = max_rate;
776 /* Set link rate and count as you want to establish*/
777 exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
778 exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
780 /* Set sink to D0 (Sink Not Ready) mode. */
781 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_SINK_POWER_STATE,
782 DPCD_SET_POWER_STATE_D0);
784 /* Enable H/W Link Training */
785 ret = exynos_dp_enable_hw_link_training(dp);
788 dev_err(dp->dev, " H/W link training failure: %d\n", ret);
792 exynos_dp_get_link_bandwidth(dp, &status);
793 dp->link_train.link_rate = status;
794 dev_dbg(dp->dev, "final bandwidth = %.2x\n",
795 dp->link_train.link_rate);
797 exynos_dp_get_lane_count(dp, &status);
798 dp->link_train.lane_count = status;
799 dev_dbg(dp->dev, "final lane count = %.2x\n",
800 dp->link_train.lane_count);
805 static int exynos_dp_set_link_train(struct exynos_dp_device *dp,
812 for (i = 0; i < DP_TIMEOUT_LOOP_COUNT; i++) {
813 exynos_dp_init_training(dp, count, bwtype);
814 retval = exynos_dp_sw_link_training(dp);
824 static int exynos_dp_config_video(struct exynos_dp_device *dp)
827 int timeout_loop = 0;
829 exynos_dp_config_video_slave_mode(dp);
831 exynos_dp_set_video_color_format(dp);
833 if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
834 dev_err(dp->dev, "PLL is not locked yet.\n");
840 if (!exynos_dp_is_slave_video_stream_clock_on(dp))
842 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
843 dev_err(dp->dev, "Wait for stream clock timed out\n");
847 usleep_range(1000, 5000);
850 /* Set to use the register calculated M/N video */
851 exynos_dp_set_video_cr_mn(dp, CALCULATED_M, 0, 0);
853 /* For video bist, Video timing must be generated by register */
854 exynos_dp_set_video_timing_mode(dp, VIDEO_TIMING_FROM_CAPTURE);
856 /* Disable video mute */
857 exynos_dp_enable_video_mute(dp, 0);
859 /* Configure video slave mode */
860 exynos_dp_enable_video_master(dp, 0);
863 exynos_dp_start_video(dp);
869 if (!exynos_dp_is_video_stream_on(dp))
872 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
873 dev_err(dp->dev, "Wait for video stream timed out\n");
877 usleep_range(1000, 5000);
881 dev_err(dp->dev, "Video stream is not detected!\n");
886 static void exynos_dp_enable_scramble(struct exynos_dp_device *dp, bool enable)
891 exynos_dp_enable_scrambling(dp);
893 exynos_dp_read_byte_from_dpcd(dp,
894 DPCD_ADDR_TRAINING_PATTERN_SET,
896 exynos_dp_write_byte_to_dpcd(dp,
897 DPCD_ADDR_TRAINING_PATTERN_SET,
898 (u8)(data & ~DPCD_SCRAMBLING_DISABLED));
900 exynos_dp_disable_scrambling(dp);
902 exynos_dp_read_byte_from_dpcd(dp,
903 DPCD_ADDR_TRAINING_PATTERN_SET,
905 exynos_dp_write_byte_to_dpcd(dp,
906 DPCD_ADDR_TRAINING_PATTERN_SET,
907 (u8)(data | DPCD_SCRAMBLING_DISABLED));
911 static irqreturn_t exynos_dp_irq_handler(int irq, void *arg)
913 struct exynos_dp_device *dp = arg;
914 enum dp_irq_type irq_type;
916 irq_type = exynos_dp_get_irq_type(dp);
918 case DP_IRQ_TYPE_HP_CABLE_IN:
919 case DP_IRQ_TYPE_HP_CABLE_OUT:
920 dev_dbg(dp->dev, "Received irq - type=%d\n", irq_type);
921 schedule_work(&dp->hotplug_work);
922 exynos_dp_clear_hotplug_interrupts(dp);
924 case DP_IRQ_TYPE_HP_CHANGE:
926 * We get these change notifications once in a while, but there
927 * is nothing we can do with them. Just ignore it for now and
928 * only handle cable changes.
930 dev_dbg(dp->dev, "Received irq - hotplug change; ignoring.\n");
931 exynos_dp_clear_hotplug_interrupts(dp);
934 dev_err(dp->dev, "Received irq - unknown type!\n");
940 static void exynos_dp_hotplug(struct work_struct *work)
942 struct exynos_dp_device *dp;
944 dp = container_of(work, struct exynos_dp_device, hotplug_work);
946 drm_helper_hpd_irq_event(dp->drm_dev);
949 static void exynos_dp_train_link(struct exynos_dp_device *dp)
953 ret = exynos_dp_handle_edid(dp);
955 dev_err(dp->dev, "unable to handle edid\n");
959 if (dp->training_type == SW_LINK_TRAINING)
960 ret = exynos_dp_set_link_train(dp, dp->video_info->lane_count,
961 dp->video_info->link_rate);
963 ret = exynos_dp_set_hw_link_train(dp,
964 dp->video_info->lane_count, dp->video_info->link_rate);
966 dev_err(dp->dev, "unable to do link train\n");
970 exynos_dp_enable_scramble(dp, 1);
971 exynos_dp_enable_rx_to_enhanced_mode(dp, 1);
972 exynos_dp_enable_enhanced_mode(dp, 1);
974 exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
975 exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
977 exynos_dp_init_video(dp);
980 static int exynos_dp_power_off(struct exynos_dp_device *dp)
986 exynos_dp_disable_hpd(dp);
988 if (work_pending(&dp->hotplug_work))
989 flush_work_sync(&dp->hotplug_work);
991 if (dp->phy_ops.phy_exit)
992 dp->phy_ops.phy_exit();
994 clk_disable(dp->clock);
998 static int exynos_dp_power_on(struct exynos_dp_device *dp)
1003 if (dp->phy_ops.phy_init)
1004 dp->phy_ops.phy_init();
1006 clk_enable(dp->clock);
1008 exynos_dp_init_dp(dp);
1011 * DP controller is reset and needs HPD interrupt to trigger
1012 * re-configuration. If we don't have valid IRQ, this is never
1013 * going to happen. Let's reconfigure it here in this case.
1015 if (dp->irq < 0 && !exynos_dp_detect_hpd(dp))
1016 schedule_work(&dp->hotplug_work);
1018 exynos_dp_train_link(dp);
1019 exynos_dp_config_video(dp);
1025 static int exynos_dp_dpms(void *ctx, int mode)
1027 struct exynos_dp_device *dp = ctx;
1030 case DRM_MODE_DPMS_ON:
1031 return exynos_dp_power_on(dp);
1033 case DRM_MODE_DPMS_STANDBY:
1034 case DRM_MODE_DPMS_SUSPEND:
1035 case DRM_MODE_DPMS_OFF:
1036 return exynos_dp_power_off(dp);
1039 DRM_ERROR("Unknown dpms mode %d\n", mode);
/* Timing validation callback — accepts everything for now (see TODO). */
static int exynos_dp_check_timing(void *ctx, void *timing)
{
	/*
	 * TODO(seanpaul): The datasheet isn't terribly descriptive about the
	 * limitations we have here. It's not vitally important to implement
	 * this right now, but should be implemented once we use EDID to mode
	 * set.
	 */
	return 0;
}
1055 static bool exynos_dp_is_connected(void *ctx)
1057 struct exynos_dp_device *dp = ctx;
1059 if (dp->force_connected)
1062 return !exynos_dp_detect_hpd(dp);
1065 static int exynos_dp_subdrv_probe(void *ctx, struct drm_device *drm_dev)
1067 struct exynos_dp_device *dp = ctx;
1070 dp->drm_dev = drm_dev;
1073 ret = request_irq(dp->irq, exynos_dp_irq_handler, dp->irq_flags,
1076 dev_err(dp->dev, "failed to request irq\n");
1081 exynos_dp_dpms(dp, DRM_MODE_DPMS_ON);
1086 static struct exynos_panel_ops dp_panel_ops = {
1087 .subdrv_probe = exynos_dp_subdrv_probe,
1088 .is_connected = exynos_dp_is_connected,
1089 .check_timing = exynos_dp_check_timing,
1090 .dpms = exynos_dp_dpms,
1093 static int __devinit exynos_dp_probe(struct platform_device *pdev)
1095 struct resource *res;
1096 struct exynos_dp_device *dp;
1097 struct exynos_dp_platdata *pdata;
1101 pdata = pdev->dev.platform_data;
1103 dev_err(&pdev->dev, "no platform data\n");
1107 dp = kzalloc(sizeof(struct exynos_dp_device), GFP_KERNEL);
1109 dev_err(&pdev->dev, "no memory for device data\n");
1113 dp->dev = &pdev->dev;
1115 dp->clock = clk_get(&pdev->dev, "dp");
1116 if (IS_ERR(dp->clock)) {
1117 dev_err(&pdev->dev, "failed to get clock\n");
1118 ret = PTR_ERR(dp->clock);
1122 clk_enable(dp->clock);
1124 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1126 dev_err(&pdev->dev, "failed to get registers\n");
1131 res = request_mem_region(res->start, resource_size(res),
1132 dev_name(&pdev->dev));
1134 dev_err(&pdev->dev, "failed to request registers region\n");
1141 dp->reg_base = ioremap(res->start, resource_size(res));
1142 if (!dp->reg_base) {
1143 dev_err(&pdev->dev, "failed to ioremap\n");
1145 goto err_req_region;
1148 if (gpio_is_valid(pdata->hpd_gpio)) {
1149 dp->hpd_gpio = pdata->hpd_gpio;
1150 ret = gpio_request_one(dp->hpd_gpio, GPIOF_IN, "dp_hpd");
1153 #ifdef CONFIG_S5P_GPIO_INT
1154 ret = s5p_register_gpio_interrupt(dp->hpd_gpio);
1156 dev_err(&pdev->dev, "cannot register/get GPIO irq\n");
1159 s3c_gpio_cfgpin(dp->hpd_gpio, S3C_GPIO_SFN(0xf));
1161 dp->irq = gpio_to_irq(dp->hpd_gpio);
1162 dp->irq_flags = IRQF_TRIGGER_RISING | IRQF_TRIGGER_FALLING;
1164 dp->hpd_gpio = -ENODEV;
1165 dp->irq = platform_get_irq(pdev, 0);
1169 dp->enabled = false;
1170 dp->training_type = pdata->training_type;
1171 dp->video_info = pdata->video_info;
1172 dp->force_connected = pdata->force_connected;
1173 if (pdata->phy_init) {
1174 dp->phy_ops.phy_init = pdata->phy_init;
1175 dp->phy_ops.phy_init();
1177 if (pdata->phy_exit)
1178 dp->phy_ops.phy_exit = pdata->phy_exit;
1180 INIT_WORK(&dp->hotplug_work, exynos_dp_hotplug);
1182 platform_set_drvdata(pdev, dp);
1184 exynos_display_attach_panel(EXYNOS_DRM_DISPLAY_TYPE_FIMD, &dp_panel_ops,
1190 if (gpio_is_valid(dp->hpd_gpio))
1191 gpio_free(dp->hpd_gpio);
1193 iounmap(dp->reg_base);
1195 release_mem_region(res->start, resource_size(res));
1204 static int __devexit exynos_dp_remove(struct platform_device *pdev)
1206 struct exynos_dp_device *dp = platform_get_drvdata(pdev);
1208 /* power_off will take care of flushing the hotplug_work */
1209 exynos_dp_dpms(dp, DRM_MODE_DPMS_OFF);
1211 if (gpio_is_valid(dp->hpd_gpio))
1212 gpio_free(dp->hpd_gpio);
1214 free_irq(dp->irq, dp);
1215 iounmap(dp->reg_base);
1217 clk_disable(dp->clock);
1220 release_mem_region(dp->res->start, resource_size(dp->res));
1227 struct platform_driver dp_driver = {
1228 .probe = exynos_dp_probe,
1229 .remove = __devexit_p(exynos_dp_remove),
1232 .owner = THIS_MODULE,