2 * Samsung SoC DP (Display Port) interface driver.
4 * Copyright (C) 2012 Samsung Electronics Co., Ltd.
5 * Author: Jingoo Han <jg1.han@samsung.com>
7 * This program is free software; you can redistribute it and/or modify it
8 * under the terms of the GNU General Public License as published by the
9 * Free Software Foundation; either version 2 of the License, or (at your
10 * option) any later version.
13 #include <linux/module.h>
14 #include <linux/platform_device.h>
15 #include <linux/slab.h>
16 #include <linux/err.h>
17 #include <linux/clk.h>
19 #include <linux/interrupt.h>
20 #include <linux/delay.h>
22 #include <linux/workqueue.h>
24 #include <video/exynos_dp.h>
28 #include "exynos_dp_core.h"
30 #define PLL_MAX_TRIES 100
/*
 * exynos_dp_init_dp() - basic one-time initialisation of the DP core.
 * Switches the SW-defined function block to normal operation, powers up
 * the analog blocks, and prepares hot-plug detect and the AUX channel.
 * NOTE(review): excerpt is truncated — the reset call and return value
 * are not visible here.
 */
32 static int exynos_dp_init_dp(struct exynos_dp_device *dp)
36 /* SW defined function Normal operation */
37 exynos_dp_enable_sw_function(dp);
39 exynos_dp_init_analog_func(dp);
41 exynos_dp_init_hpd(dp);
42 exynos_dp_init_aux(dp);
/*
 * exynos_dp_detect_hpd() - wait for the sink to assert hot-plug detect.
 * Re-arms HPD, then polls the plug-in status; gives up with an error
 * once the loop counter exceeds DP_TIMEOUT_LOOP_COUNT.
 */
47 static int exynos_dp_detect_hpd(struct exynos_dp_device *dp)
51 exynos_dp_init_hpd(dp);
55 while (exynos_dp_get_plug_in_status(dp) != 0) {
	/* Bounded poll: bail out after DP_TIMEOUT_LOOP_COUNT iterations. */
57 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
58 dev_err(dp->dev, "failed to get hpd plug status\n");
67 static unsigned char exynos_dp_calc_edid_check_sum(unsigned char *edid_data)
70 unsigned char sum = 0;
72 for (i = 0; i < EDID_BLOCK_LENGTH; i++)
73 sum = sum + edid_data[i];
/*
 * exynos_dp_read_edid() - read the sink's EDID over the AUX/I2C channel.
 * Reads the base EDID block, and one extension block when the Extension
 * Flag byte says one is present; verifies each block's checksum; and if
 * the sink has posted an EDID-read test request in DPCD, writes the
 * checksum back as the test response.
 */
78 static int exynos_dp_read_edid(struct exynos_dp_device *dp)
80 unsigned char edid[EDID_BLOCK_LENGTH * 2];
81 unsigned int extend_block = 0;
83 unsigned char test_vector;
87 * EDID device address is 0x50.
88 * However, if necessary, you must have set upper address
89 * into E-EDID in I2C device, 0x30.
92 /* Read Extension Flag, Number of 128-byte EDID extension blocks */
93 retval = exynos_dp_read_byte_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
	/* Sink reports at least one extension block: read base + extension. */
99 if (extend_block > 0) {
100 dev_dbg(dp->dev, "EDID data includes a single extension!\n");
103 retval = exynos_dp_read_bytes_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
106 &edid[EDID_HEADER_PATTERN]);
108 dev_err(dp->dev, "EDID Read failed!\n");
111 sum = exynos_dp_calc_edid_check_sum(edid);
113 dev_err(dp->dev, "EDID bad checksum!\n");
117 /* Read additional EDID data */
118 retval = exynos_dp_read_bytes_from_i2c(dp,
119 I2C_EDID_DEVICE_ADDR,
122 &edid[EDID_BLOCK_LENGTH]);
124 dev_err(dp->dev, "EDID Read failed!\n");
127 sum = exynos_dp_calc_edid_check_sum(&edid[EDID_BLOCK_LENGTH]);
129 dev_err(dp->dev, "EDID bad checksum!\n");
	/* Answer a pending DPCD EDID-read test request with our checksum. */
133 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_TEST_REQUEST,
135 if (test_vector & DPCD_TEST_EDID_READ) {
136 exynos_dp_write_byte_to_dpcd(dp,
137 DPCD_ADDR_TEST_EDID_CHECKSUM,
138 edid[EDID_BLOCK_LENGTH + EDID_CHECKSUM]);
139 exynos_dp_write_byte_to_dpcd(dp,
140 DPCD_ADDR_TEST_RESPONSE,
141 DPCD_TEST_EDID_CHECKSUM_WRITE);
	/* No extensions: read and verify only the base 128-byte block. */
144 dev_info(dp->dev, "EDID data does not include any extensions.\n");
147 retval = exynos_dp_read_bytes_from_i2c(dp,
148 I2C_EDID_DEVICE_ADDR,
151 &edid[EDID_HEADER_PATTERN]);
153 dev_err(dp->dev, "EDID Read failed!\n");
156 sum = exynos_dp_calc_edid_check_sum(edid);
158 dev_err(dp->dev, "EDID bad checksum!\n");
162 exynos_dp_read_byte_from_dpcd(dp,
163 DPCD_ADDR_TEST_REQUEST,
165 if (test_vector & DPCD_TEST_EDID_READ) {
166 exynos_dp_write_byte_to_dpcd(dp,
167 DPCD_ADDR_TEST_EDID_CHECKSUM,
168 edid[EDID_CHECKSUM]);
169 exynos_dp_write_byte_to_dpcd(dp,
170 DPCD_ADDR_TEST_RESPONSE,
171 DPCD_TEST_EDID_CHECKSUM_WRITE);
	/* NOTE(review): success path logged with dev_err — dev_dbg would be
	 * the appropriate level for a success message. */
175 dev_err(dp->dev, "EDID Read success!\n");
/*
 * exynos_dp_handle_edid() - sanity-read the sink's DPCD capability
 * registers, then attempt the EDID read with retries.
 */
179 static int exynos_dp_handle_edid(struct exynos_dp_device *dp)
185 /* Read DPCD DPCD_ADDR_DPCD_REV~RECEIVE_PORT1_CAP_1 */
186 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_DPCD_REV, 12, buf);
	/* EDID reads over AUX can be flaky right after plug-in: retry x3. */
191 for (i = 0; i < 3; i++) {
192 ret = exynos_dp_read_edid(dp);
/*
 * exynos_dp_enable_rx_to_enhanced_mode() - set or clear the sink's
 * ENHANCED_FRAME_EN bit while preserving its configured lane count.
 * The enable/disable branch structure is not fully visible in this
 * excerpt; the two writes below are the respective branch bodies.
 */
200 static void exynos_dp_enable_rx_to_enhanced_mode(struct exynos_dp_device *dp,
	/* Read the current LANE_COUNT_SET so the count can be written back. */
205 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET, &data);
208 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
209 DPCD_ENHANCED_FRAME_EN |
210 DPCD_LANE_COUNT_SET(data));
212 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
213 DPCD_LANE_COUNT_SET(data));
/*
 * exynos_dp_is_enhanced_mode_available() - query the ENHANCED_FRAME_CAP
 * bit of the sink's DPCD MAX_LANE_COUNT register.
 */
216 static int exynos_dp_is_enhanced_mode_available(struct exynos_dp_device *dp)
221 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
222 retval = DPCD_ENHANCED_FRAME_CAP(data);
/*
 * exynos_dp_set_enhanced_mode() - enable enhanced framing on both the
 * sink (via DPCD) and the local transmitter when the sink supports it.
 */
227 static void exynos_dp_set_enhanced_mode(struct exynos_dp_device *dp)
231 data = exynos_dp_is_enhanced_mode_available(dp);
232 exynos_dp_enable_rx_to_enhanced_mode(dp, data);
233 exynos_dp_enable_enhanced_mode(dp, data);
/*
 * exynos_dp_training_pattern_dis() - stop transmitting a training
 * pattern: clear it on the TX side and tell the sink training is over.
 */
236 static void exynos_dp_training_pattern_dis(struct exynos_dp_device *dp)
238 exynos_dp_set_training_pattern(dp, DP_NONE);
240 exynos_dp_write_byte_to_dpcd(dp,
241 DPCD_ADDR_TRAINING_PATTERN_SET,
242 DPCD_TRAINING_PATTERN_DISABLED);
/*
 * exynos_dp_set_lane_lane_pre_emphasis() - program @pre_emphasis into the
 * per-lane analog control register selected by @lane (0-3).  The switch
 * labels/breaks are not visible in this excerpt; each call below is one
 * case body.
 */
245 static void exynos_dp_set_lane_lane_pre_emphasis(struct exynos_dp_device *dp,
246 int pre_emphasis, int lane)
250 exynos_dp_set_lane0_pre_emphasis(dp, pre_emphasis);
253 exynos_dp_set_lane1_pre_emphasis(dp, pre_emphasis);
257 exynos_dp_set_lane2_pre_emphasis(dp, pre_emphasis);
261 exynos_dp_set_lane3_pre_emphasis(dp, pre_emphasis);
/*
 * exynos_dp_link_start() - kick off link training.  Resets the training
 * state machine, programs link rate and lane count on both ends, drops
 * TX pre-emphasis to minimum, waits (bounded) for the PLL, then starts
 * training pattern 1 (clock recovery) and mirrors it to the sink.
 * Returns 0 on success or a negative error from the DPCD writes.
 */
266 static int exynos_dp_link_start(struct exynos_dp_device *dp)
268 int ret, lane, lane_count, pll_tries;
271 lane_count = dp->link_train.lane_count;
	/* Reset the software training state machine. */
273 dp->link_train.lt_state = CLOCK_RECOVERY;
274 dp->link_train.eq_loop = 0;
276 for (lane = 0; lane < lane_count; lane++)
277 dp->link_train.cr_loop[lane] = 0;
279 /* Set link rate and count as you want to establish*/
280 exynos_dp_set_link_bandwidth(dp, dp->link_train.link_rate);
281 exynos_dp_set_lane_count(dp, dp->link_train.lane_count);
283 /* Setup RX configuration */
284 buf[0] = dp->link_train.link_rate;
285 buf[1] = dp->link_train.lane_count;
286 ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_LINK_BW_SET, 2, buf);
290 /* Set TX pre-emphasis to minimum */
291 for (lane = 0; lane < lane_count; lane++)
292 exynos_dp_set_lane_lane_pre_emphasis(dp,
293 PRE_EMPHASIS_LEVEL_0, lane);
295 /* Wait for PLL lock */
	/* Bounded spin: give up after PLL_MAX_TRIES polls. */
297 while (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
298 if (pll_tries == PLL_MAX_TRIES)
305 /* Set training pattern 1 */
306 exynos_dp_set_training_pattern(dp, TRAINING_PTN1);
308 /* Set RX training pattern */
309 ret = exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_TRAINING_PATTERN_SET,
310 DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_1);
	/* Start all lanes at swing level 0 / pre-emphasis level 0. */
314 for (lane = 0; lane < lane_count; lane++)
315 buf[lane] = DPCD_PRE_EMPHASIS_PATTERN2_LEVEL0 |
316 DPCD_VOLTAGE_SWING_PATTERN1_LEVEL0;
317 ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_TRAINING_LANE0_SET,
325 static unsigned char exynos_dp_get_lane_status(u8 link_status[6], int lane)
327 int shift = (lane & 1) * 4;
328 u8 link_value = link_status[lane>>1];
330 return (link_value >> shift) & 0xf;
333 static int exynos_dp_clock_recovery_ok(u8 link_status[6], int lane_count)
338 for (lane = 0; lane < lane_count; lane++) {
339 lane_status = exynos_dp_get_lane_status(link_status, lane);
340 if ((lane_status & DPCD_LANE_CR_DONE) == 0)
346 static int exynos_dp_channel_eq_ok(u8 link_status[6], int lane_count)
352 lane_align = link_status[2];
353 if ((lane_align & DPCD_INTERLANE_ALIGN_DONE) == 0)
356 for (lane = 0; lane < lane_count; lane++) {
357 lane_status = exynos_dp_get_lane_status(link_status, lane);
358 lane_status &= DPCD_CHANNEL_EQ_BITS;
359 if (lane_status != DPCD_CHANNEL_EQ_BITS)
365 static unsigned char exynos_dp_get_adjust_request_voltage(u8 adjust_request[2],
368 int shift = (lane & 1) * 4;
369 u8 link_value = adjust_request[lane>>1];
371 return (link_value >> shift) & 0x3;
374 static unsigned char exynos_dp_get_adjust_request_pre_emphasis(
375 u8 adjust_request[2],
378 int shift = (lane & 1) * 4;
379 u8 link_value = adjust_request[lane>>1];
381 return ((link_value >> shift) & 0xc) >> 2;
/*
 * exynos_dp_set_lane_link_training() - write @training_lane_set into
 * the TX training control register for @lane (0-3).  The switch
 * labels/breaks are not visible in this excerpt; each call below is one
 * case body.
 */
384 static void exynos_dp_set_lane_link_training(struct exynos_dp_device *dp,
385 u8 training_lane_set, int lane)
389 exynos_dp_set_lane0_link_training(dp, training_lane_set);
392 exynos_dp_set_lane1_link_training(dp, training_lane_set);
396 exynos_dp_set_lane2_link_training(dp, training_lane_set);
400 exynos_dp_set_lane3_link_training(dp, training_lane_set);
/*
 * exynos_dp_get_lane_link_training() - read back the TX training
 * control register for the selected lane (switch labels not visible in
 * this excerpt; the lane parameter and return are also truncated).
 */
405 static unsigned int exynos_dp_get_lane_link_training(
406 struct exynos_dp_device *dp,
413 reg = exynos_dp_get_lane0_link_training(dp);
416 reg = exynos_dp_get_lane1_link_training(dp);
419 reg = exynos_dp_get_lane2_link_training(dp);
422 reg = exynos_dp_get_lane3_link_training(dp);
/*
 * exynos_dp_reduce_link_rate() - fall back after a failed training run:
 * if we were at 2.7 Gbps, restart training at 1.62 Gbps; otherwise stop
 * the training pattern and mark the state machine FAILED.
 */
429 static void exynos_dp_reduce_link_rate(struct exynos_dp_device *dp)
431 if (dp->link_train.link_rate == LINK_RATE_2_70GBPS) {
432 /* set to reduced bit rate */
433 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
434 dev_err(dp->dev, "set to bandwidth %.2x\n",
435 dp->link_train.link_rate);
436 dp->link_train.lt_state = START;
	/* Already at the lowest rate: disable training and give up. */
438 exynos_dp_training_pattern_dis(dp);
439 /* set enhanced mode if available */
440 exynos_dp_set_enhanced_mode(dp);
441 dp->link_train.lt_state = FAILED;
/*
 * exynos_dp_get_adjust_train() - translate the sink's ADJUST_REQUEST
 * fields into per-lane TRAINING_LANEx_SET values, setting the
 * MAX_SWING / MAX_PRE_EMPHASIS "reached" flags when level 3 is asked.
 * Results are cached in dp->link_train.training_lane[].
 */
445 static void exynos_dp_get_adjust_train(struct exynos_dp_device *dp,
446 u8 adjust_request[2])
454 lane_count = dp->link_train.lane_count;
455 for (lane = 0; lane < lane_count; lane++) {
456 voltage_swing = exynos_dp_get_adjust_request_voltage(
457 adjust_request, lane);
458 pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
459 adjust_request, lane);
460 training_lane = DPCD_VOLTAGE_SWING_SET(voltage_swing) |
461 DPCD_PRE_EMPHASIS_SET(pre_emphasis);
	/* Level 3 is the ceiling: advertise that the maximum is reached. */
463 if (voltage_swing == VOLTAGE_LEVEL_3 ||
464 pre_emphasis == PRE_EMPHASIS_LEVEL_3) {
465 training_lane |= DPCD_MAX_SWING_REACHED;
466 training_lane |= DPCD_MAX_PRE_EMPHASIS_REACHED;
468 dp->link_train.training_lane[lane] = training_lane;
/*
 * exynos_dp_check_max_cr_loop() - detect a hopeless clock-recovery run:
 * either the requested voltage swing reached level 3 or some lane has
 * exhausted MAX_CR_LOOP attempts.  (Return statements not visible in
 * this excerpt; callers test the result against zero.)
 */
472 static int exynos_dp_check_max_cr_loop(struct exynos_dp_device *dp,
478 lane_count = dp->link_train.lane_count;
479 for (lane = 0; lane < lane_count; lane++) {
480 if (voltage_swing == VOLTAGE_LEVEL_3 ||
481 dp->link_train.cr_loop[lane] == MAX_CR_LOOP)
/*
 * exynos_dp_process_clock_recovery() - one iteration of the clock
 * recovery phase.  Reads lane status + adjust requests from DPCD; if CR
 * is done, advances to pattern 2 / equalizer training; otherwise bumps
 * per-lane retry counters, checks the abort condition, recomputes drive
 * settings from the sink's requests, and reprograms TX and sink.
 */
487 static int exynos_dp_process_clock_recovery(struct exynos_dp_device *dp)
489 int ret, lane, lane_count;
490 u8 voltage_swing, pre_emphasis, training_lane, link_status[6];
	/* One read covers LANE0_1_STATUS..ADJUST_REQUEST (DPCD 0x202-0x207). */
495 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS, 6,
500 lane_count = dp->link_train.lane_count;
	/* Adjust-request bytes are the last two of the 6-byte status block. */
501 adjust_request = link_status + 4;
503 if (exynos_dp_clock_recovery_ok(link_status, lane_count) == 0) {
504 /* set training pattern 2 for EQ */
505 exynos_dp_set_training_pattern(dp, TRAINING_PTN2);
507 ret = exynos_dp_write_byte_to_dpcd(dp,
508 DPCD_ADDR_TRAINING_PATTERN_SET,
509 DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_2);
513 dp->link_train.lt_state = EQUALIZER_TRAINING;
	/* CR not done: count how often each lane re-runs at the same drive. */
515 for (lane = 0; lane < lane_count; lane++) {
516 training_lane = exynos_dp_get_lane_link_training(
518 voltage_swing = exynos_dp_get_adjust_request_voltage(
519 adjust_request, lane);
520 pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
521 adjust_request, lane);
	/* Same settings requested again => this lane is looping. */
522 if ((DPCD_VOLTAGE_SWING_GET(training_lane) == voltage_swing) &&
523 (DPCD_PRE_EMPHASIS_GET(training_lane) == pre_emphasis))
524 dp->link_train.cr_loop[lane]++;
525 dp->link_train.training_lane[lane] = training_lane;
528 if (exynos_dp_check_max_cr_loop(dp, voltage_swing) != 0) {
529 exynos_dp_reduce_link_rate(dp);
534 exynos_dp_get_adjust_train(dp, adjust_request);
	/* Push the new per-lane drive settings to TX and sink. */
536 for (lane = 0; lane < lane_count; lane++) {
537 exynos_dp_set_lane_link_training(dp,
538 dp->link_train.training_lane[lane], lane);
539 ret = exynos_dp_write_byte_to_dpcd(dp,
540 DPCD_ADDR_TRAINING_LANE0_SET + lane,
541 dp->link_train.training_lane[lane]);
/*
 * exynos_dp_process_equalizer_training() - one iteration of the channel
 * equalization phase.  Re-checks CR (a CR loss here means reduce the
 * rate), and when EQ completes, reads back the final link rate / lane
 * count and finishes; otherwise retries up to MAX_EQ_LOOP with updated
 * drive settings.
 */
549 static int exynos_dp_process_equalizer_training(struct exynos_dp_device *dp)
551 int ret, lane, lane_count;
558 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS,
563 adjust_request = link_status + 4;
564 lane_count = dp->link_train.lane_count;
	/* Clock recovery lost during EQ: non-zero return => drop the rate. */
566 if (exynos_dp_clock_recovery_ok(link_status, lane_count)) {
567 exynos_dp_reduce_link_rate(dp);
570 if (exynos_dp_channel_eq_ok(link_status, lane_count) == 0) {
571 /* training pattern set to Normal */
572 exynos_dp_training_pattern_dis(dp);
574 dev_info(dp->dev, "Link Training success!\n");
	/* NOTE(review): the '®' below is a mis-encoded '&reg' (HTML-entity
	 * corruption in this extraction) — restore before compiling. */
576 exynos_dp_get_link_bandwidth(dp, ®);
577 dp->link_train.link_rate = reg;
578 dev_dbg(dp->dev, "final bandwidth = %.2x\n",
579 dp->link_train.link_rate);
	/* NOTE(review): same mis-encoding of '&reg' here. */
581 exynos_dp_get_lane_count(dp, ®);
582 dp->link_train.lane_count = reg;
583 dev_dbg(dp->dev, "final lane count = %.2x\n",
584 dp->link_train.lane_count);
585 /* set enhanced mode if available */
586 exynos_dp_set_enhanced_mode(dp);
588 dp->link_train.lt_state = FINISHED;
	/* EQ not done yet: bounded retry with refreshed drive settings. */
591 dp->link_train.eq_loop++;
593 if (dp->link_train.eq_loop > MAX_EQ_LOOP) {
594 exynos_dp_reduce_link_rate(dp);
596 exynos_dp_get_adjust_train(dp, adjust_request);
598 for (lane = 0; lane < lane_count; lane++) {
599 exynos_dp_set_lane_link_training(dp,
600 dp->link_train.training_lane[lane],
602 ret = exynos_dp_write_byte_to_dpcd(dp,
603 DPCD_ADDR_TRAINING_LANE0_SET + lane,
604 dp->link_train.training_lane[lane]);
/*
 * exynos_dp_get_max_rx_bandwidth() - read the sink's MAX_LINK_RATE DPCD
 * register into the caller-provided output (out-parameter not visible
 * in this excerpt).
 */
614 static void exynos_dp_get_max_rx_bandwidth(struct exynos_dp_device *dp,
620 * For DP rev.1.1, Maximum link rate of Main Link lanes
621 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps
623 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LINK_RATE, &data);
/*
 * exynos_dp_get_max_rx_lane_count() - read the sink's MAX_LANE_COUNT
 * DPCD register and store the masked lane-count field in *lane_count.
 */
627 static void exynos_dp_get_max_rx_lane_count(struct exynos_dp_device *dp,
633 * For DP rev.1.1, Maximum number of Main Link lanes
634 * 0x01 = 1 lane, 0x02 = 2 lanes, 0x04 = 4 lanes
636 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
637 *lane_count = DPCD_MAX_LANE_COUNT(data);
/*
 * exynos_dp_init_training() - prepare for a training run: reset the
 * link macro, read the sink's maximum rate/lane-count capabilities,
 * sanitize nonsensical values, clamp against the caller's @max_lane /
 * @max_rate, and power up all analog blocks.
 */
640 static void exynos_dp_init_training(struct exynos_dp_device *dp,
641 enum link_lane_count_type max_lane,
642 enum link_rate_type max_rate)
645 * MACRO_RST must be applied after the PLL_LOCK to avoid
646 * the DP inter pair skew issue for at least 10 us
648 exynos_dp_reset_macro(dp);
650 /* Initialize by reading RX's DPCD */
651 exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
652 exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
	/* Only the two DP 1.1 rates are valid; fall back to 1.62 Gbps. */
654 if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
655 (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
656 dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
657 dp->link_train.link_rate);
658 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
661 if (dp->link_train.lane_count == 0) {
662 dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
663 dp->link_train.lane_count);
664 dp->link_train.lane_count = (u8)LANE_COUNT1;
667 /* Setup TX lane count & rate */
668 if (dp->link_train.lane_count > max_lane)
669 dp->link_train.lane_count = max_lane;
670 if (dp->link_train.link_rate > max_rate)
671 dp->link_train.link_rate = max_rate;
673 /* All DP analog module power up */
674 exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
/*
 * exynos_dp_sw_link_training() - run the software link-training state
 * machine (START -> CLOCK_RECOVERY -> EQUALIZER_TRAINING -> FINISHED),
 * after powering down the analog blocks of unused lanes.  Loops until a
 * step fails or training finishes; logs on failure.
 * (Switch case labels and fall-through structure are not fully visible
 * in this excerpt.)
 */
677 static int exynos_dp_sw_link_training(struct exynos_dp_device *dp)
679 int ret = 0, training_finished = 0;
681 /* Turn off unnecessary lanes */
682 switch (dp->link_train.lane_count) {
684 exynos_dp_set_analog_power_down(dp, CH1_BLOCK, 1);
686 exynos_dp_set_analog_power_down(dp, CH2_BLOCK, 1);
687 exynos_dp_set_analog_power_down(dp, CH3_BLOCK, 1);
693 dp->link_train.lt_state = START;
695 /* Process here */
696 while (!ret && !training_finished) {
697 switch (dp->link_train.lt_state) {
699 ret = exynos_dp_link_start(dp);
702 ret = exynos_dp_process_clock_recovery(dp);
704 case EQUALIZER_TRAINING:
705 ret = exynos_dp_process_equalizer_training(dp);
708 training_finished = 1;
715 dev_err(dp->dev, "eDP link training failed (%d)\n", ret);
/*
 * exynos_dp_set_hw_link_train() - hardware-assisted link training.
 * Stops video, checks the PLL, resets the macro, minimizes TX
 * pre-emphasis, reads and sanitizes the sink's capabilities, programs
 * the link, wakes the sink (D0), and kicks off the H/W trainer; on
 * success reads back the negotiated rate and lane count.
 */
720 static int exynos_dp_set_hw_link_train(struct exynos_dp_device *dp,
727 exynos_dp_stop_video(dp);
729 if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
730 dev_err(dp->dev, "PLL is not locked yet.\n");
734 exynos_dp_reset_macro(dp);
736 /* Set TX pre-emphasis to minimum */
737 for (lane = 0; lane < max_lane; lane++)
738 exynos_dp_set_lane_lane_pre_emphasis(dp,
739 PRE_EMPHASIS_LEVEL_0, lane);
741 /* All DP analog module power up */
742 exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
744 /* Initialize by reading RX's DPCD */
745 exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
746 exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
	/* NOTE(review): this capability sanitize/clamp sequence duplicates
	 * exynos_dp_init_training() — a shared helper would avoid drift. */
748 if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
749 (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
750 dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
751 dp->link_train.link_rate);
752 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
755 if (dp->link_train.lane_count == 0) {
756 dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
757 dp->link_train.lane_count);
758 dp->link_train.lane_count = (u8)LANE_COUNT1;
761 /* Setup TX lane count & rate */
762 if (dp->link_train.lane_count > max_lane)
763 dp->link_train.lane_count = max_lane;
764 if (dp->link_train.link_rate > max_rate)
765 dp->link_train.link_rate = max_rate;
767 /* Set link rate and count as you want to establish*/
	/* NOTE(review): TX is programmed from dp->video_info here, not from
	 * the clamped dp->link_train values computed above — confirm this
	 * is intentional. */
768 exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
769 exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
771 /* Set sink to D0 (Sink Not Ready) mode. */
772 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_SINK_POWER_STATE,
773 DPCD_SET_POWER_STATE_D0);
775 /* Enable H/W Link Training */
776 ret = exynos_dp_enable_hw_link_training(dp);
779 dev_err(dp->dev, " H/W link training failure: %d\n", ret);
	/* Read back what the hardware trainer actually negotiated. */
783 exynos_dp_get_link_bandwidth(dp, &status);
784 dp->link_train.link_rate = status;
785 dev_dbg(dp->dev, "final bandwidth = %.2x\n",
786 dp->link_train.link_rate);
788 exynos_dp_get_lane_count(dp, &status);
789 dp->link_train.lane_count = status;
790 dev_dbg(dp->dev, "final lane count = %.2x\n",
791 dp->link_train.lane_count);
/*
 * exynos_dp_set_link_train() - software link training entry point:
 * repeatedly (up to DP_TIMEOUT_LOOP_COUNT) re-initialize and run the SW
 * training state machine until it succeeds.
 */
796 static int exynos_dp_set_link_train(struct exynos_dp_device *dp,
803 for (i = 0; i < DP_TIMEOUT_LOOP_COUNT; i++) {
804 exynos_dp_init_training(dp, count, bwtype);
805 retval = exynos_dp_sw_link_training(dp);
/*
 * exynos_dp_config_video() - configure and start the main video stream.
 * Sets slave mode and color format, verifies PLL lock, waits (bounded)
 * for the incoming video stream clock, programs M/N values and timing
 * mode, un-mutes, starts the stream, and finally waits (bounded) for
 * the stream-on indication.
 */
815 static int exynos_dp_config_video(struct exynos_dp_device *dp)
818 int timeout_loop = 0;
821 exynos_dp_config_video_slave_mode(dp);
823 exynos_dp_set_video_color_format(dp);
825 if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
826 dev_err(dp->dev, "PLL is not locked yet.\n");
	/* Bounded wait for the slave-mode video stream clock. */
832 if (exynos_dp_is_slave_video_stream_clock_on(dp) == 0)
834 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
835 dev_err(dp->dev, "Timeout of video streamclk ok\n");
842 /* Set to use the register calculated M/N video */
843 exynos_dp_set_video_cr_mn(dp, CALCULATED_M, 0, 0);
845 /* For video bist, Video timing must be generated by register */
846 exynos_dp_set_video_timing_mode(dp, VIDEO_TIMING_FROM_CAPTURE);
848 /* Disable video mute */
849 exynos_dp_enable_video_mute(dp, 0);
851 /* Configure video slave mode */
852 exynos_dp_enable_video_master(dp, 0);
855 exynos_dp_start_video(dp);
	/* Bounded wait for the stream-on indication. */
861 if (exynos_dp_is_video_stream_on(dp) == 0) {
865 } else if (done_count) {
868 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
869 dev_err(dp->dev, "Timeout of video streamclk ok\n");
877 dev_err(dp->dev, "Video stream is not detected!\n");
/*
 * exynos_dp_enable_scramble() - enable or disable scrambling on both
 * ends of the link: toggle it in the TX and mirror the state into the
 * sink's SCRAMBLING_DISABLED bit via read-modify-write of
 * TRAINING_PATTERN_SET.  (The if/else around the two halves is not
 * visible in this excerpt.)
 */
882 static void exynos_dp_enable_scramble(struct exynos_dp_device *dp, bool enable)
886 if (enable) {
887 exynos_dp_enable_scrambling(dp);
889 exynos_dp_read_byte_from_dpcd(dp,
890 DPCD_ADDR_TRAINING_PATTERN_SET,
892 exynos_dp_write_byte_to_dpcd(dp,
893 DPCD_ADDR_TRAINING_PATTERN_SET,
894 (u8)(data & ~DPCD_SCRAMBLING_DISABLED));
896 exynos_dp_disable_scrambling(dp);
898 exynos_dp_read_byte_from_dpcd(dp,
899 DPCD_ADDR_TRAINING_PATTERN_SET,
901 exynos_dp_write_byte_to_dpcd(dp,
902 DPCD_ADDR_TRAINING_PATTERN_SET,
903 (u8)(data | DPCD_SCRAMBLING_DISABLED));
907 static irqreturn_t exynos_dp_irq_handler(int irq, void *arg)
909 struct exynos_dp_device *dp = arg;
911 dev_err(dp->dev, "exynos_dp_irq_handler\n");
/*
 * exynos_dp_hotplug() - deferred hot-plug work: detect HPD, fetch the
 * EDID, run SW or HW link training per platform data, then finalize
 * the link (scrambling, enhanced mode, rate/count) and start video.
 * Each step logs and (per the visible dev_err lines) aborts on failure.
 */
915 static void exynos_dp_hotplug(struct work_struct *work)
917 struct exynos_dp_device *dp;
920 dp = container_of(work, struct exynos_dp_device, hotplug_work);
922 ret = exynos_dp_detect_hpd(dp);
924 dev_err(dp->dev, "unable to detect hpd\n");
928 ret = exynos_dp_handle_edid(dp);
930 dev_err(dp->dev, "unable to handle edid\n");
	/* Platform data selects software vs. hardware-assisted training. */
934 if (dp->training_type == SW_LINK_TRAINING)
935 ret = exynos_dp_set_link_train(dp, dp->video_info->lane_count,
936 dp->video_info->link_rate);
938 ret = exynos_dp_set_hw_link_train(dp,
939 dp->video_info->lane_count, dp->video_info->link_rate);
941 dev_err(dp->dev, "unable to do link train\n");
945 exynos_dp_enable_scramble(dp, 1);
946 exynos_dp_enable_rx_to_enhanced_mode(dp, 1);
947 exynos_dp_enable_enhanced_mode(dp, 1);
949 exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
950 exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
952 exynos_dp_init_video(dp);
953 exynos_dp_config_video(dp);
/*
 * exynos_dp_probe() - platform driver probe: allocate device state,
 * acquire and enable the "dp" clock, map the register resource, fetch
 * the IRQ, take configuration from platform data, initialize the core,
 * install the interrupt handler, and schedule the initial hot-plug
 * detection work.  The trailing iounmap/release lines are the error
 * unwinding labels (goto targets are not visible in this excerpt).
 */
956 static int __devinit exynos_dp_probe(struct platform_device *pdev)
958 struct resource *res;
959 struct exynos_dp_device *dp;
960 struct exynos_dp_platdata *pdata;
964 pdata = pdev->dev.platform_data;
966 dev_err(&pdev->dev, "no platform data\n");
970 dp = kzalloc(sizeof(struct exynos_dp_device), GFP_KERNEL);
972 dev_err(&pdev->dev, "no memory for device data\n");
976 dp->dev = &pdev->dev;
978 dp->clock = clk_get(&pdev->dev, "dp");
979 if (IS_ERR(dp->clock)) {
980 dev_err(&pdev->dev, "failed to get clock\n");
981 ret = PTR_ERR(dp->clock);
985 clk_enable(dp->clock);
987 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
989 dev_err(&pdev->dev, "failed to get registers\n");
994 res = request_mem_region(res->start, resource_size(res),
995 dev_name(&pdev->dev));
997 dev_err(&pdev->dev, "failed to request registers region\n");
1004 dp->reg_base = ioremap(res->start, resource_size(res));
1005 if (!dp->reg_base) {
1006 dev_err(&pdev->dev, "failed to ioremap\n");
1008 goto err_req_region;
1011 dp->irq = platform_get_irq(pdev, 0);
1013 dev_err(&pdev->dev, "failed to get irq\n");
	/* Configuration handed over from board platform data. */
1018 dp->training_type = pdata->training_type;
1019 dp->video_info = pdata->video_info;
1020 if (pdata->phy_init)
1023 exynos_dp_init_dp(dp);
1025 ret = request_irq(dp->irq, exynos_dp_irq_handler, 0,
1028 dev_err(&pdev->dev, "failed to request irq\n");
1032 INIT_WORK(&dp->hotplug_work, exynos_dp_hotplug);
1034 platform_set_drvdata(pdev, dp);
	/* Kick off the initial hot-plug detection asynchronously. */
1035 schedule_work(&dp->hotplug_work);
	/* Error unwinding (labels not visible in this excerpt). */
1040 iounmap(dp->reg_base);
1042 release_mem_region(res->start, resource_size(res));
/*
 * exynos_dp_remove() - tear down in reverse probe order: flush pending
 * hot-plug work, power down the PHY, release the IRQ, unmap registers,
 * stop the clock and release the memory region.
 * NOTE(review): flush_work_sync()/__devexit date this to pre-3.7
 * kernels; modern kernels use flush_work() and plain remove callbacks.
 */
1051 static int __devexit exynos_dp_remove(struct platform_device *pdev)
1053 struct exynos_dp_platdata *pdata = pdev->dev.platform_data;
1054 struct exynos_dp_device *dp = platform_get_drvdata(pdev);
1056 if (work_pending(&dp->hotplug_work))
1057 flush_work_sync(&dp->hotplug_work);
1059 if (pdata && pdata->phy_exit)
1062 free_irq(dp->irq, dp);
1063 iounmap(dp->reg_base);
1065 clk_disable(dp->clock);
1068 release_mem_region(dp->res->start, resource_size(dp->res));
1075 #ifdef CONFIG_PM_SLEEP
/*
 * exynos_dp_suspend() - system sleep hook: flush pending hot-plug work,
 * power down the PHY via the platform callback, and gate the clock.
 */
1076 static int exynos_dp_suspend(struct device *dev)
1078 struct platform_device *pdev = to_platform_device(dev);
1079 struct exynos_dp_platdata *pdata = pdev->dev.platform_data;
1080 struct exynos_dp_device *dp = platform_get_drvdata(pdev);
1082 if (work_pending(&dp->hotplug_work))
1083 flush_work_sync(&dp->hotplug_work);
1085 if (pdata && pdata->phy_exit)
1088 clk_disable(dp->clock);
/*
 * exynos_dp_resume() - system wake hook: re-initialize the PHY, ungate
 * the clock, re-run core init, and re-schedule hot-plug detection to
 * re-establish the link.
 */
1093 static int exynos_dp_resume(struct device *dev)
1095 struct platform_device *pdev = to_platform_device(dev);
1096 struct exynos_dp_platdata *pdata = pdev->dev.platform_data;
1097 struct exynos_dp_device *dp = platform_get_drvdata(pdev);
1099 if (pdata && pdata->phy_init)
1102 clk_enable(dp->clock);
1104 exynos_dp_init_dp(dp);
1105 schedule_work(&dp->hotplug_work);
/* System sleep PM callbacks (only built when CONFIG_PM_SLEEP is set). */
1111 static const struct dev_pm_ops exynos_dp_pm_ops = {
1112 SET_SYSTEM_SLEEP_PM_OPS(exynos_dp_suspend, exynos_dp_resume)
/* Device-tree match table for exynos5 DP nodes. */
1116 static const struct of_device_id exynos_dp_match[] = {
1117 { .compatible = "samsung,exynos5-dp" },
1120 MODULE_DEVICE_TABLE(of, exynos_dp_match);
1123 static struct platform_driver exynos_dp_driver = {
1124 .probe = exynos_dp_probe,
1125 .remove = __devexit_p(exynos_dp_remove),
1128 .owner = THIS_MODULE,
1129 .pm = &exynos_dp_pm_ops,
1130 .of_match_table = of_match_ptr(exynos_dp_match),
/* Module entry: registers via platform_driver_probe (non-hotpluggable). */
1134 static int __init exynos_dp_init(void)
1136 return platform_driver_probe(&exynos_dp_driver, exynos_dp_probe);
1139 static void __exit exynos_dp_exit(void)
1141 platform_driver_unregister(&exynos_dp_driver);
1143 /* TODO: Register as module_platform_driver */
1144 /* Currently, we make it late_initcall to make */
1145 /* sure that s3c-fb is probed before DP driver */
1146 late_initcall(exynos_dp_init);
1147 module_exit(exynos_dp_exit);
1149 MODULE_AUTHOR("Jingoo Han <jg1.han@samsung.com>");
1150 MODULE_DESCRIPTION("Samsung SoC DP Driver");
1151 MODULE_LICENSE("GPL");