2 * Samsung SoC DP (Display Port) interface driver.
4 * Copyright (C) 2012 Samsung Electronics Co., Ltd.
5 * Author: Jingoo Han <jg1.han@samsung.com>
7 * This program is free software; you can redistribute it and/or modify it
8 * under the terms of the GNU General Public License as published by the
9 * Free Software Foundation; either version 2 of the License, or (at your
10 * option) any later version.
14 #include "drm_crtc_helper.h"
16 #include <linux/module.h>
17 #include <linux/platform_device.h>
18 #include <linux/slab.h>
19 #include <linux/err.h>
20 #include <linux/clk.h>
21 #include <linux/gpio.h>
23 #include <linux/interrupt.h>
24 #include <linux/delay.h>
26 #include <linux/workqueue.h>
28 #include <video/exynos_dp.h>
29 #include "exynos_drm_drv.h"
30 #include "exynos_drm_display.h"
32 #ifdef CONFIG_DRM_PTN3460
33 #include "i2c/ptn3460.h"
38 #include "exynos_dp_core.h"
40 #define PLL_MAX_TRIES 100
/*
 * exynos_dp_init_dp() - bring the DP transmitter core to its initial state.
 *
 * Enables the SW-controlled ("normal operation") function block, the analog
 * function blocks, HPD detection and the AUX channel on the controller.
 */
42 static int exynos_dp_init_dp(struct exynos_dp_device *dp)
46 /* SW defined function Normal operation */
47 exynos_dp_enable_sw_function(dp);
49 exynos_dp_init_analog_func(dp);
51 exynos_dp_init_hpd(dp);
52 exynos_dp_init_aux(dp);
/*
 * exynos_dp_detect_hpd() - report hot-plug-detect state.
 *
 * When a dedicated HPD GPIO exists, return its inverted level
 * (NOTE(review): 0 appears to mean "plugged" given callers negate the
 * result — confirm against board HPD polarity).  Otherwise poll the
 * controller's plug-in status, giving up with an error after
 * DP_TIMEOUT_LOOP_COUNT iterations.
 */
57 static int exynos_dp_detect_hpd(struct exynos_dp_device *dp)
61 if (gpio_is_valid(dp->hpd_gpio))
62 return !gpio_get_value(dp->hpd_gpio);
64 while (exynos_dp_get_plug_in_status(dp) != 0) {
66 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
67 dev_err(dp->dev, "failed to get hpd plug status\n");
76 static unsigned char exynos_dp_calc_edid_check_sum(unsigned char *edid_data)
79 unsigned char sum = 0;
81 for (i = 0; i < EDID_BLOCK_LENGTH; i++)
82 sum = sum + edid_data[i];
/*
 * exynos_dp_read_edid() - fetch the sink's EDID over the AUX/I2C channel.
 *
 * Reads the extension-flag byte first; with an extension present, reads
 * both 128-byte blocks, otherwise only the base block.  Each block's
 * checksum is verified, and if the sink requested an EDID-read compliance
 * test (DPCD TEST_REQUEST), the checksum is written back to
 * DPCD TEST_EDID_CHECKSUM with a TEST_RESPONSE acknowledgment.
 */
87 static int exynos_dp_read_edid(struct exynos_dp_device *dp)
89 unsigned char edid[EDID_BLOCK_LENGTH * 2];
90 unsigned int extend_block = 0;
92 unsigned char test_vector;
96 * EDID device address is 0x50.
97 * However, if necessary, you must have set upper address
98 * into E-EDID in I2C device, 0x30.
101 /* Read Extension Flag, Number of 128-byte EDID extension blocks */
102 retval = exynos_dp_read_byte_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
108 if (extend_block > 0) {
109 dev_dbg(dp->dev, "EDID data includes a single extension!\n");
/* Base block (bytes 0..127), then the single extension block. */
112 retval = exynos_dp_read_bytes_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
115 &edid[EDID_HEADER_PATTERN]);
117 dev_err(dp->dev, "EDID Read failed!\n");
120 sum = exynos_dp_calc_edid_check_sum(edid);
122 dev_err(dp->dev, "EDID bad checksum!\n");
126 /* Read additional EDID data */
127 retval = exynos_dp_read_bytes_from_i2c(dp,
128 I2C_EDID_DEVICE_ADDR,
131 &edid[EDID_BLOCK_LENGTH]);
133 dev_err(dp->dev, "EDID Read failed!\n");
136 sum = exynos_dp_calc_edid_check_sum(&edid[EDID_BLOCK_LENGTH]);
138 dev_err(dp->dev, "EDID bad checksum!\n");
/* Compliance test: echo the extension block checksum back to the sink. */
142 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_TEST_REQUEST,
144 if (test_vector & DPCD_TEST_EDID_READ) {
145 exynos_dp_write_byte_to_dpcd(dp,
146 DPCD_ADDR_TEST_EDID_CHECKSUM,
147 edid[EDID_BLOCK_LENGTH + EDID_CHECKSUM]);
148 exynos_dp_write_byte_to_dpcd(dp,
149 DPCD_ADDR_TEST_RESPONSE,
150 DPCD_TEST_EDID_CHECKSUM_WRITE);
153 dev_info(dp->dev, "EDID data does not include any extensions.\n");
/* No extension: read and validate the base block only. */
156 retval = exynos_dp_read_bytes_from_i2c(dp,
157 I2C_EDID_DEVICE_ADDR,
160 &edid[EDID_HEADER_PATTERN]);
162 dev_err(dp->dev, "EDID Read failed!\n");
165 sum = exynos_dp_calc_edid_check_sum(edid);
167 dev_err(dp->dev, "EDID bad checksum!\n");
171 exynos_dp_read_byte_from_dpcd(dp,
172 DPCD_ADDR_TEST_REQUEST,
174 if (test_vector & DPCD_TEST_EDID_READ) {
175 exynos_dp_write_byte_to_dpcd(dp,
176 DPCD_ADDR_TEST_EDID_CHECKSUM,
177 edid[EDID_CHECKSUM]);
178 exynos_dp_write_byte_to_dpcd(dp,
179 DPCD_ADDR_TEST_RESPONSE,
180 DPCD_TEST_EDID_CHECKSUM_WRITE);
/*
 * NOTE(review): this is a success message logged at dev_err() level;
 * it should almost certainly be dev_dbg() — confirm and downgrade.
 */
184 dev_err(dp->dev, "EDID Read success!\n");
/*
 * exynos_dp_handle_edid() - read sink capabilities, then the EDID.
 *
 * Reads the first 12 DPCD receiver-capability bytes (DPCD_REV through
 * RECEIVE_PORT1_CAP_1), then retries exynos_dp_read_edid() up to 3 times.
 */
188 static int exynos_dp_handle_edid(struct exynos_dp_device *dp)
194 /* Read DPCD DPCD_ADDR_DPCD_REV~RECEIVE_PORT1_CAP_1 */
195 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_DPCD_REV, 12, buf);
200 for (i = 0; i < 3; i++) {
201 ret = exynos_dp_read_edid(dp);
/*
 * exynos_dp_enable_rx_to_enhanced_mode() - toggle enhanced framing at the RX.
 *
 * Read-modify-writes DPCD LANE_COUNT_SET: with enable, OR in
 * DPCD_ENHANCED_FRAME_EN while preserving the lane count; otherwise write
 * the lane count alone (clearing the enhanced-frame bit).
 */
209 static void exynos_dp_enable_rx_to_enhanced_mode(struct exynos_dp_device *dp,
214 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET, &data);
217 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
218 DPCD_ENHANCED_FRAME_EN |
219 DPCD_LANE_COUNT_SET(data));
221 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_LANE_COUNT_SET,
222 DPCD_LANE_COUNT_SET(data));
/*
 * exynos_dp_is_enhanced_mode_available() - query RX enhanced-framing support.
 *
 * Returns the ENHANCED_FRAME_CAP bit extracted from DPCD MAX_LANE_COUNT.
 */
225 static int exynos_dp_is_enhanced_mode_available(struct exynos_dp_device *dp)
230 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
231 retval = DPCD_ENHANCED_FRAME_CAP(data);
/*
 * exynos_dp_set_enhanced_mode() - enable enhanced framing if the sink can.
 *
 * Applies the sink's capability to both the RX (via DPCD) and the TX.
 */
236 static void exynos_dp_set_enhanced_mode(struct exynos_dp_device *dp)
240 data = exynos_dp_is_enhanced_mode_available(dp);
241 exynos_dp_enable_rx_to_enhanced_mode(dp, data);
242 exynos_dp_enable_enhanced_mode(dp, data);
/*
 * exynos_dp_training_pattern_dis() - stop sending link-training patterns.
 *
 * Disables the pattern on the TX, then tells the sink via DPCD
 * TRAINING_PATTERN_SET that training output has ended.
 */
245 static void exynos_dp_training_pattern_dis(struct exynos_dp_device *dp)
247 exynos_dp_set_training_pattern(dp, DP_NONE);
249 exynos_dp_write_byte_to_dpcd(dp,
250 DPCD_ADDR_TRAINING_PATTERN_SET,
251 DPCD_TRAINING_PATTERN_DISABLED);
/*
 * exynos_dp_set_lane_lane_pre_emphasis() - set TX pre-emphasis for one lane.
 *
 * Dispatches to the per-lane register helper for lanes 0..3.
 */
254 static void exynos_dp_set_lane_lane_pre_emphasis(struct exynos_dp_device *dp,
255 int pre_emphasis, int lane)
259 exynos_dp_set_lane0_pre_emphasis(dp, pre_emphasis);
262 exynos_dp_set_lane1_pre_emphasis(dp, pre_emphasis);
266 exynos_dp_set_lane2_pre_emphasis(dp, pre_emphasis);
270 exynos_dp_set_lane3_pre_emphasis(dp, pre_emphasis);
/*
 * exynos_dp_link_start() - begin SW link training (clock recovery phase).
 *
 * Resets the training state machine (CLOCK_RECOVERY state, loop counters),
 * programs link rate and lane count on both TX and RX, drives minimum
 * pre-emphasis on all active lanes, waits for PLL lock (bounded by
 * PLL_MAX_TRIES), and starts training pattern 1 with scrambling disabled,
 * mirroring the pattern selection to the sink's DPCD.
 */
275 static int exynos_dp_link_start(struct exynos_dp_device *dp)
277 int ret, lane, lane_count, pll_tries;
280 lane_count = dp->link_train.lane_count;
282 dp->link_train.lt_state = CLOCK_RECOVERY;
283 dp->link_train.eq_loop = 0;
285 for (lane = 0; lane < lane_count; lane++)
286 dp->link_train.cr_loop[lane] = 0;
288 /* Set link rate and count as you want to establish*/
289 exynos_dp_set_link_bandwidth(dp, dp->link_train.link_rate);
290 exynos_dp_set_lane_count(dp, dp->link_train.lane_count);
292 /* Setup RX configuration */
293 buf[0] = dp->link_train.link_rate;
294 buf[1] = dp->link_train.lane_count;
295 ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_LINK_BW_SET, 2, buf);
299 /* Set TX pre-emphasis to minimum */
300 for (lane = 0; lane < lane_count; lane++)
301 exynos_dp_set_lane_lane_pre_emphasis(dp,
302 PRE_EMPHASIS_LEVEL_0, lane);
304 /* Wait for PLL lock */
306 while (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
307 if (pll_tries == PLL_MAX_TRIES)
314 /* Set training pattern 1 */
315 exynos_dp_set_training_pattern(dp, TRAINING_PTN1);
317 /* Set RX training pattern */
318 ret = exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_TRAINING_PATTERN_SET,
319 DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_1);
/* Start all active lanes at minimum swing/pre-emphasis on the sink too. */
323 for (lane = 0; lane < lane_count; lane++)
324 buf[lane] = DPCD_PRE_EMPHASIS_PATTERN2_LEVEL0 |
325 DPCD_VOLTAGE_SWING_PATTERN1_LEVEL0;
326 ret = exynos_dp_write_bytes_to_dpcd(dp, DPCD_ADDR_TRAINING_LANE0_SET,
334 static unsigned char exynos_dp_get_lane_status(u8 link_status[6], int lane)
336 int shift = (lane & 1) * 4;
337 u8 link_value = link_status[lane>>1];
339 return (link_value >> shift) & 0xf;
/*
 * exynos_dp_clock_recovery_ok() - check CR_DONE on every active lane.
 *
 * Returns 0 when all lanes report clock-recovery done; a non-zero error
 * indication otherwise (callers treat non-zero as "CR failed").
 */
342 static int exynos_dp_clock_recovery_ok(u8 link_status[6], int lane_count)
347 for (lane = 0; lane < lane_count; lane++) {
348 lane_status = exynos_dp_get_lane_status(link_status, lane);
349 if ((lane_status & DPCD_LANE_CR_DONE) == 0)
/*
 * exynos_dp_channel_eq_ok() - check equalization/symbol-lock completion.
 *
 * Requires INTERLANE_ALIGN_DONE (byte 2 of the status block) plus the full
 * set of channel-EQ bits on every active lane.  Returns 0 on success.
 */
355 static int exynos_dp_channel_eq_ok(u8 link_status[6], int lane_count)
361 lane_align = link_status[2];
362 if ((lane_align & DPCD_INTERLANE_ALIGN_DONE) == 0)
365 for (lane = 0; lane < lane_count; lane++) {
366 lane_status = exynos_dp_get_lane_status(link_status, lane);
367 lane_status &= DPCD_CHANNEL_EQ_BITS;
368 if (lane_status != DPCD_CHANNEL_EQ_BITS)
374 static unsigned char exynos_dp_get_adjust_request_voltage(u8 adjust_request[2],
377 int shift = (lane & 1) * 4;
378 u8 link_value = adjust_request[lane>>1];
380 return (link_value >> shift) & 0x3;
383 static unsigned char exynos_dp_get_adjust_request_pre_emphasis(
384 u8 adjust_request[2],
387 int shift = (lane & 1) * 4;
388 u8 link_value = adjust_request[lane>>1];
390 return ((link_value >> shift) & 0xc) >> 2;
/*
 * exynos_dp_set_lane_link_training() - program one lane's training value.
 *
 * Writes the combined swing/pre-emphasis training byte to the per-lane
 * TX register for lanes 0..3.
 */
393 static void exynos_dp_set_lane_link_training(struct exynos_dp_device *dp,
394 u8 training_lane_set, int lane)
398 exynos_dp_set_lane0_link_training(dp, training_lane_set);
401 exynos_dp_set_lane1_link_training(dp, training_lane_set);
405 exynos_dp_set_lane2_link_training(dp, training_lane_set);
409 exynos_dp_set_lane3_link_training(dp, training_lane_set);
/*
 * exynos_dp_get_lane_link_training() - read back one lane's training value.
 *
 * Inverse of exynos_dp_set_lane_link_training(): fetches the per-lane
 * TX training register for lanes 0..3.
 */
414 static unsigned int exynos_dp_get_lane_link_training(
415 struct exynos_dp_device *dp,
422 reg = exynos_dp_get_lane0_link_training(dp);
425 reg = exynos_dp_get_lane1_link_training(dp);
428 reg = exynos_dp_get_lane2_link_training(dp);
431 reg = exynos_dp_get_lane3_link_training(dp);
/*
 * exynos_dp_reduce_link_rate() - fall back after a training failure.
 *
 * If currently at 2.7 Gbps, drop to 1.62 Gbps and restart training
 * (lt_state = START).  If already at the reduced rate, stop the training
 * pattern, apply enhanced mode if available, and mark training FAILED.
 */
438 static void exynos_dp_reduce_link_rate(struct exynos_dp_device *dp)
440 if (dp->link_train.link_rate == LINK_RATE_2_70GBPS) {
441 /* set to reduced bit rate */
442 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
/* NOTE(review): informational fallback message logged at dev_err level. */
443 dev_err(dp->dev, "set to bandwidth %.2x\n",
444 dp->link_train.link_rate);
445 dp->link_train.lt_state = START;
447 exynos_dp_training_pattern_dis(dp);
448 /* set enhanced mode if available */
449 exynos_dp_set_enhanced_mode(dp);
450 dp->link_train.lt_state = FAILED;
/*
 * exynos_dp_get_adjust_train() - build per-lane training values from the
 * sink's ADJUST_REQUEST bytes.
 *
 * For each active lane, combines the requested voltage swing and
 * pre-emphasis into a DPCD training byte, setting MAX_SWING_REACHED /
 * MAX_PRE_EMPHASIS_REACHED when level 3 is requested, and caches the
 * result in dp->link_train.training_lane[].
 */
454 static void exynos_dp_get_adjust_train(struct exynos_dp_device *dp,
455 u8 adjust_request[2])
463 lane_count = dp->link_train.lane_count;
464 for (lane = 0; lane < lane_count; lane++) {
465 voltage_swing = exynos_dp_get_adjust_request_voltage(
466 adjust_request, lane);
467 pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
468 adjust_request, lane);
469 training_lane = DPCD_VOLTAGE_SWING_SET(voltage_swing) |
470 DPCD_PRE_EMPHASIS_SET(pre_emphasis);
472 if (voltage_swing == VOLTAGE_LEVEL_3 ||
473 pre_emphasis == PRE_EMPHASIS_LEVEL_3) {
474 training_lane |= DPCD_MAX_SWING_REACHED;
475 training_lane |= DPCD_MAX_PRE_EMPHASIS_REACHED;
477 dp->link_train.training_lane[lane] = training_lane;
/*
 * exynos_dp_check_max_cr_loop() - detect a clock-recovery dead end.
 *
 * Scans the active lanes; a lane that has hit maximum voltage swing
 * (VOLTAGE_LEVEL_3) or exhausted MAX_CR_LOOP retries means clock recovery
 * cannot succeed at the current rate.  Returns non-zero in that case
 * (callers then reduce the link rate).
 */
481 static int exynos_dp_check_max_cr_loop(struct exynos_dp_device *dp,
487 lane_count = dp->link_train.lane_count;
488 for (lane = 0; lane < lane_count; lane++) {
489 if (voltage_swing == VOLTAGE_LEVEL_3 ||
490 dp->link_train.cr_loop[lane] == MAX_CR_LOOP)
/*
 * exynos_dp_process_clock_recovery() - one iteration of the CR phase.
 *
 * Reads the six LANE0_1_STATUS..ADJUST_REQUEST bytes from DPCD.  If clock
 * recovery is done on all lanes, switches to training pattern 2 and moves
 * the state machine to EQUALIZER_TRAINING.  Otherwise counts unchanged
 * swing/pre-emphasis requests per lane (cr_loop), bails out to a reduced
 * link rate when a lane is stuck, and re-programs the sink-requested
 * training values on both TX and RX.
 */
496 static int exynos_dp_process_clock_recovery(struct exynos_dp_device *dp)
498 int ret, lane, lane_count;
499 u8 voltage_swing, pre_emphasis, training_lane, link_status[6];
504 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS, 6,
509 lane_count = dp->link_train.lane_count;
/* ADJUST_REQUEST bytes follow the four status bytes in the block read. */
513 adjust_request = link_status + 4;
515 if (exynos_dp_clock_recovery_ok(link_status, lane_count) == 0) {
516 /* set training pattern 2 for EQ */
517 exynos_dp_set_training_pattern(dp, TRAINING_PTN2);
519 ret = exynos_dp_write_byte_to_dpcd(dp,
520 DPCD_ADDR_TRAINING_PATTERN_SET,
521 DPCD_SCRAMBLING_DISABLED | DPCD_TRAINING_PATTERN_2);
525 dp->link_train.lt_state = EQUALIZER_TRAINING;
/* CR not done: count lanes whose request did not change since last pass. */
527 for (lane = 0; lane < lane_count; lane++) {
528 training_lane = exynos_dp_get_lane_link_training(
530 voltage_swing = exynos_dp_get_adjust_request_voltage(
531 adjust_request, lane);
532 pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
533 adjust_request, lane);
534 if ((DPCD_VOLTAGE_SWING_GET(training_lane) == voltage_swing) &&
535 (DPCD_PRE_EMPHASIS_GET(training_lane) == pre_emphasis))
536 dp->link_train.cr_loop[lane]++;
537 dp->link_train.training_lane[lane] = training_lane;
540 if (exynos_dp_check_max_cr_loop(dp, voltage_swing) != 0) {
541 exynos_dp_reduce_link_rate(dp);
/* Apply the sink's newly requested swing/pre-emphasis and mirror to DPCD. */
546 exynos_dp_get_adjust_train(dp, adjust_request);
548 for (lane = 0; lane < lane_count; lane++) {
549 exynos_dp_set_lane_link_training(dp,
550 dp->link_train.training_lane[lane], lane);
551 ret = exynos_dp_write_byte_to_dpcd(dp,
552 DPCD_ADDR_TRAINING_LANE0_SET + lane,
553 dp->link_train.training_lane[lane]);
561 static int exynos_dp_process_equalizer_training(struct exynos_dp_device *dp)
563 int ret, lane, lane_count;
570 ret = exynos_dp_read_bytes_from_dpcd(dp, DPCD_ADDR_LANE0_1_STATUS,
575 adjust_request = link_status + 4;
576 lane_count = dp->link_train.lane_count;
578 if (exynos_dp_clock_recovery_ok(link_status, lane_count)) {
579 exynos_dp_reduce_link_rate(dp);
582 if (exynos_dp_channel_eq_ok(link_status, lane_count) == 0) {
583 /* traing pattern Set to Normal */
584 exynos_dp_training_pattern_dis(dp);
586 dev_info(dp->dev, "Link Training success!\n");
588 exynos_dp_get_link_bandwidth(dp, ®);
589 dp->link_train.link_rate = reg;
590 dev_dbg(dp->dev, "final bandwidth = %.2x\n",
591 dp->link_train.link_rate);
593 exynos_dp_get_lane_count(dp, ®);
594 dp->link_train.lane_count = reg;
595 dev_dbg(dp->dev, "final lane count = %.2x\n",
596 dp->link_train.lane_count);
597 /* set enhanced mode if available */
598 exynos_dp_set_enhanced_mode(dp);
600 dp->link_train.lt_state = FINISHED;
603 dp->link_train.eq_loop++;
605 if (dp->link_train.eq_loop > MAX_EQ_LOOP) {
606 exynos_dp_reduce_link_rate(dp);
608 exynos_dp_get_adjust_train(dp, adjust_request);
610 for (lane = 0; lane < lane_count; lane++) {
611 exynos_dp_set_lane_link_training(dp,
612 dp->link_train.training_lane[lane],
614 ret = exynos_dp_write_byte_to_dpcd(dp,
615 DPCD_ADDR_TRAINING_LANE0_SET + lane,
616 dp->link_train.training_lane[lane]);
/*
 * exynos_dp_get_max_rx_bandwidth() - read the sink's maximum link rate.
 *
 * Fetches DPCD MAX_LINK_RATE (0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps).
 */
626 static void exynos_dp_get_max_rx_bandwidth(struct exynos_dp_device *dp,
632 * For DP rev.1.1, Maximum link rate of Main Link lanes
633 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps
635 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LINK_RATE, &data);
/*
 * exynos_dp_get_max_rx_lane_count() - read the sink's maximum lane count.
 *
 * Fetches DPCD MAX_LANE_COUNT and extracts the count field into
 * *lane_count (0x01/0x02/0x04 lanes).
 */
639 static void exynos_dp_get_max_rx_lane_count(struct exynos_dp_device *dp,
645 * For DP rev.1.1, Maximum number of Main Link lanes
646 * 0x01 = 1 lane, 0x02 = 2 lanes, 0x04 = 4 lanes
648 exynos_dp_read_byte_from_dpcd(dp, DPCD_ADDR_MAX_LANE_COUNT, &data);
649 *lane_count = DPCD_MAX_LANE_COUNT(data);
/*
 * exynos_dp_init_training() - prepare for SW link training.
 *
 * Resets the MACRO block, seeds link rate and lane count from the sink's
 * DPCD capabilities (sanitizing out-of-range values to 1.62 Gbps / 1 lane),
 * clamps both to the caller-supplied TX maxima, and powers up all DP
 * analog blocks.
 */
652 static void exynos_dp_init_training(struct exynos_dp_device *dp,
653 enum link_lane_count_type max_lane,
654 enum link_rate_type max_rate)
657 * MACRO_RST must be applied after the PLL_LOCK to avoid
658 * the DP inter pair skew issue for at least 10 us
660 exynos_dp_reset_macro(dp);
662 /* Initialize by reading RX's DPCD */
663 exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
664 exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
/* Sanitize values the sink reported outside the DP 1.1 legal set. */
666 if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
667 (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
668 dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
669 dp->link_train.link_rate);
670 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
673 if (dp->link_train.lane_count == 0) {
674 dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
675 dp->link_train.lane_count);
676 dp->link_train.lane_count = (u8)LANE_COUNT1;
679 /* Setup TX lane count & rate */
680 if (dp->link_train.lane_count > max_lane)
681 dp->link_train.lane_count = max_lane;
682 if (dp->link_train.link_rate > max_rate)
683 dp->link_train.link_rate = max_rate;
685 /* All DP analog module power up */
686 exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
/*
 * exynos_dp_sw_link_training() - run the software training state machine.
 *
 * Powers down the analog blocks of unused lanes (fall-through switch over
 * lane_count), then iterates START -> CLOCK_RECOVERY -> EQUALIZER_TRAINING
 * until a state handler fails or training reaches a terminal state.
 */
689 static int exynos_dp_sw_link_training(struct exynos_dp_device *dp)
691 int ret = 0, training_finished = 0;
693 /* Turn off unnecessary lanes */
694 switch (dp->link_train.lane_count) {
696 exynos_dp_set_analog_power_down(dp, CH1_BLOCK, 1);
698 exynos_dp_set_analog_power_down(dp, CH2_BLOCK, 1);
699 exynos_dp_set_analog_power_down(dp, CH3_BLOCK, 1);
705 dp->link_train.lt_state = START;
/* Drive the state machine until failure or a terminal state. */
708 while (!ret && !training_finished) {
709 switch (dp->link_train.lt_state) {
711 ret = exynos_dp_link_start(dp);
714 ret = exynos_dp_process_clock_recovery(dp);
716 case EQUALIZER_TRAINING:
717 ret = exynos_dp_process_equalizer_training(dp);
720 training_finished = 1;
727 dev_err(dp->dev, "eDP link training failed (%d)\n", ret);
/*
 * exynos_dp_set_hw_link_train() - train the link using the HW sequencer.
 *
 * Stops video, verifies PLL lock, resets the MACRO block, drives minimum
 * pre-emphasis, powers up the analog blocks, seeds and clamps link
 * rate/lane count from the sink's DPCD (same sanitizing as
 * exynos_dp_init_training()), wakes the sink (DPCD power state D0),
 * kicks off hardware link training, and records the final negotiated
 * bandwidth and lane count.
 */
732 static int exynos_dp_set_hw_link_train(struct exynos_dp_device *dp,
739 exynos_dp_stop_video(dp);
741 if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
742 dev_err(dp->dev, "PLL is not locked yet.\n");
746 exynos_dp_reset_macro(dp);
748 /* Set TX pre-emphasis to minimum */
749 for (lane = 0; lane < max_lane; lane++)
750 exynos_dp_set_lane_lane_pre_emphasis(dp,
751 PRE_EMPHASIS_LEVEL_0, lane);
753 /* All DP analog module power up */
754 exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
756 /* Initialize by reading RX's DPCD */
757 exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
758 exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);
760 if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
761 (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
762 dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
763 dp->link_train.link_rate);
764 dp->link_train.link_rate = LINK_RATE_1_62GBPS;
767 if (dp->link_train.lane_count == 0) {
768 dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
769 dp->link_train.lane_count);
770 dp->link_train.lane_count = (u8)LANE_COUNT1;
773 /* Setup TX lane count & rate */
774 if (dp->link_train.lane_count > max_lane)
775 dp->link_train.lane_count = max_lane;
776 if (dp->link_train.link_rate > max_rate)
777 dp->link_train.link_rate = max_rate;
/*
 * NOTE(review): the TX is programmed from dp->video_info here, not from
 * the dp->link_train values that were just clamped above — confirm this
 * is intentional (the clamped values appear unused by the HW path).
 */
779 /* Set link rate and count as you want to establish*/
780 exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
781 exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
783 /* Set sink to D0 (Sink Not Ready) mode. */
784 exynos_dp_write_byte_to_dpcd(dp, DPCD_ADDR_SINK_POWER_STATE,
785 DPCD_SET_POWER_STATE_D0);
787 /* Enable H/W Link Training */
788 ret = exynos_dp_enable_hw_link_training(dp);
791 dev_err(dp->dev, " H/W link training failure: %d\n", ret);
/* Record what the HW sequencer actually negotiated. */
795 exynos_dp_get_link_bandwidth(dp, &status);
796 dp->link_train.link_rate = status;
797 dev_dbg(dp->dev, "final bandwidth = %.2x\n",
798 dp->link_train.link_rate);
800 exynos_dp_get_lane_count(dp, &status);
801 dp->link_train.lane_count = status;
802 dev_dbg(dp->dev, "final lane count = %.2x\n",
803 dp->link_train.lane_count);
/*
 * exynos_dp_set_link_train() - SW link training with retries.
 *
 * Re-initializes and re-runs software training up to
 * DP_TIMEOUT_LOOP_COUNT times until it succeeds.
 */
808 static int exynos_dp_set_link_train(struct exynos_dp_device *dp,
815 for (i = 0; i < DP_TIMEOUT_LOOP_COUNT; i++) {
816 exynos_dp_init_training(dp, count, bwtype);
817 retval = exynos_dp_sw_link_training(dp);
/*
 * exynos_dp_config_video() - configure and start the main video stream.
 *
 * Sets slave-mode video config and color format, requires PLL lock, waits
 * (bounded by DP_TIMEOUT_LOOP_COUNT, 1-5 ms per poll) for the slave video
 * stream clock, selects register-calculated M/N values and capture-based
 * timing, un-mutes video, starts the stream, and finally polls until the
 * controller reports the video stream is actually on.
 */
827 static int exynos_dp_config_video(struct exynos_dp_device *dp)
830 int timeout_loop = 0;
832 exynos_dp_config_video_slave_mode(dp);
834 exynos_dp_set_video_color_format(dp);
836 if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
837 dev_err(dp->dev, "PLL is not locked yet.\n");
/* Wait for the slave video stream clock before configuring M/N. */
843 if (!exynos_dp_is_slave_video_stream_clock_on(dp))
845 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
846 dev_err(dp->dev, "Wait for stream clock timed out\n");
850 usleep_range(1000, 5000);
853 /* Set to use the register calculated M/N video */
854 exynos_dp_set_video_cr_mn(dp, CALCULATED_M, 0, 0);
856 /* For video bist, Video timing must be generated by register */
857 exynos_dp_set_video_timing_mode(dp, VIDEO_TIMING_FROM_CAPTURE);
859 /* Disable video mute */
860 exynos_dp_enable_video_mute(dp, 0);
862 /* Configure video slave mode */
863 exynos_dp_enable_video_master(dp, 0);
866 exynos_dp_start_video(dp);
/* Confirm the stream actually came up. */
872 if (!exynos_dp_is_video_stream_on(dp))
875 if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
876 dev_err(dp->dev, "Wait for video stream timed out\n");
880 usleep_range(1000, 5000);
884 dev_err(dp->dev, "Video stream is not detected!\n");
/*
 * exynos_dp_enable_scramble() - toggle data scrambling on TX and sink.
 *
 * Enables/disables scrambling in the controller and mirrors the state to
 * the sink by clearing/setting DPCD_SCRAMBLING_DISABLED in the sink's
 * TRAINING_PATTERN_SET register via read-modify-write.
 */
889 static void exynos_dp_enable_scramble(struct exynos_dp_device *dp, bool enable)
894 exynos_dp_enable_scrambling(dp);
896 exynos_dp_read_byte_from_dpcd(dp,
897 DPCD_ADDR_TRAINING_PATTERN_SET,
899 exynos_dp_write_byte_to_dpcd(dp,
900 DPCD_ADDR_TRAINING_PATTERN_SET,
901 (u8)(data & ~DPCD_SCRAMBLING_DISABLED));
903 exynos_dp_disable_scrambling(dp);
905 exynos_dp_read_byte_from_dpcd(dp,
906 DPCD_ADDR_TRAINING_PATTERN_SET,
908 exynos_dp_write_byte_to_dpcd(dp,
909 DPCD_ADDR_TRAINING_PATTERN_SET,
910 (u8)(data | DPCD_SCRAMBLING_DISABLED));
/*
 * exynos_dp_irq_handler() - top-half HPD interrupt handler.
 *
 * Cable-in/out events schedule the hotplug worker; hotplug "change"
 * notifications are acknowledged but deliberately ignored; anything else
 * is logged as unknown.  Hotplug interrupt status is cleared in the
 * handled cases.
 */
914 static irqreturn_t exynos_dp_irq_handler(int irq, void *arg)
916 struct exynos_dp_device *dp = arg;
917 enum dp_irq_type irq_type;
919 irq_type = exynos_dp_get_irq_type(dp);
921 case DP_IRQ_TYPE_HP_CABLE_IN:
922 case DP_IRQ_TYPE_HP_CABLE_OUT:
923 dev_dbg(dp->dev, "Received irq - type=%d\n", irq_type);
924 schedule_work(&dp->hotplug_work);
925 exynos_dp_clear_hotplug_interrupts(dp);
927 case DP_IRQ_TYPE_HP_CHANGE:
929 * We get these change notifications once in a while, but there
930 * is nothing we can do with them. Just ignore it for now and
931 * only handle cable changes.
933 dev_dbg(dp->dev, "Received irq - hotplug change; ignoring.\n")
934 exynos_dp_clear_hotplug_interrupts(dp);
937 dev_err(dp->dev, "Received irq - unknown type!\n");
/*
 * exynos_dp_hotplug() - workqueue handler for hot-plug events.
 *
 * On cable present: optionally waits for the PTN3460 eDP-LVDS bridge
 * (CONFIG_DRM_PTN3460, up to 30 s), reads/handles the EDID, performs SW
 * or HW link training per dp->training_type, forces scrambling and
 * enhanced mode on, programs lane count/bandwidth from video_info, and
 * (re)starts video.  Always notifies DRM of the HPD event at the end.
 */
943 static void exynos_dp_hotplug(struct work_struct *work)
945 struct exynos_dp_device *dp;
948 dp = container_of(work, struct exynos_dp_device, hotplug_work);
950 /* Cable is disconnected, skip dp initialization */
951 if (exynos_dp_detect_hpd(dp))
954 #ifdef CONFIG_DRM_PTN3460
955 ret = ptn3460_wait_until_ready(30 * 1000);
957 DRM_ERROR("PTN3460 is not ready, don't plug\n");
962 ret = exynos_dp_handle_edid(dp);
964 dev_err(dp->dev, "unable to handle edid\n");
968 if (dp->training_type == SW_LINK_TRAINING)
969 ret = exynos_dp_set_link_train(dp, dp->video_info->lane_count,
970 dp->video_info->link_rate);
972 ret = exynos_dp_set_hw_link_train(dp,
973 dp->video_info->lane_count, dp->video_info->link_rate);
975 dev_err(dp->dev, "unable to do link train\n");
/*
 * NOTE(review): scrambling and enhanced mode are forced on here without
 * consulting the sink's ENHANCED_FRAME_CAP — confirm all supported sinks
 * accept this.
 */
979 exynos_dp_enable_scramble(dp, 1);
980 exynos_dp_enable_rx_to_enhanced_mode(dp, 1);
981 exynos_dp_enable_enhanced_mode(dp, 1);
983 exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
984 exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);
986 exynos_dp_init_video(dp);
987 exynos_dp_config_video(dp);
990 drm_helper_hpd_irq_event(dp->drm_dev);
/*
 * exynos_dp_power_off() - shut the DP block down.
 *
 * Disables HPD, flushes any pending hotplug work, exits the PHY and gates
 * the clock.
 */
993 static int exynos_dp_power_off(struct exynos_dp_device *dp)
995 exynos_dp_disable_hpd(dp);
/*
 * NOTE(review): guarding flush with work_pending() skips the flush for a
 * work item that is currently *running* (no longer pending) — an
 * unconditional flush would be safer; confirm intent.
 */
997 if (work_pending(&dp->hotplug_work))
998 flush_work_sync(&dp->hotplug_work);
1000 if (dp->phy_ops.phy_exit)
1001 dp->phy_ops.phy_exit();
1003 clk_disable(dp->clock);
/*
 * exynos_dp_power_on() - power the DP block up.
 *
 * Initializes the PHY (if provided), ungates the clock and re-initializes
 * the controller.  Without a valid IRQ there is no HPD interrupt to
 * trigger reconfiguration, so a connected panel is reconfigured directly
 * by scheduling the hotplug worker here.
 */
1007 static int exynos_dp_power_on(struct exynos_dp_device *dp)
1009 if (dp->phy_ops.phy_init)
1010 dp->phy_ops.phy_init();
1012 clk_enable(dp->clock);
1014 exynos_dp_init_dp(dp);
1017 * DP controller is reset and needs HPD interrupt to trigger
1018 * re-configuration. If we don't have valid IRQ, this is never
1019 * going to happen. Let's reconfigure it here in this case.
1021 if (dp->irq < 0 && !exynos_dp_detect_hpd(dp))
1022 schedule_work(&dp->hotplug_work);
/*
 * exynos_dp_dpms() - DPMS entry point for the panel ops.
 *
 * ON powers the block up; STANDBY/SUSPEND/OFF all power it down; any
 * other mode is rejected with an error.
 */
1027 static int exynos_dp_dpms(void *ctx, int mode)
1029 struct exynos_dp_device *dp = ctx;
1032 case DRM_MODE_DPMS_ON:
1033 return exynos_dp_power_on(dp);
1035 case DRM_MODE_DPMS_STANDBY:
1036 case DRM_MODE_DPMS_SUSPEND:
1037 case DRM_MODE_DPMS_OFF:
1038 return exynos_dp_power_off(dp);
1041 DRM_ERROR("Unknown dpms mode %d\n", mode);
/*
 * exynos_dp_check_timing() - validate a display timing (stub).
 *
 * Currently accepts everything; see the TODO below for the intended
 * datasheet-based limits.
 */
1046 static int exynos_dp_check_timing(void *ctx, void *timing)
1049 * TODO(seanpaul): The datasheet isn't terribly descriptive about the
1050 * limitations we have here. It's not vitally important to implement
1051 * this right now, but should be implemented once we use EDID to mode
/*
 * exynos_dp_is_connected() - connection status for the panel ops.
 *
 * Honors the platform "force_connected" override, otherwise reports the
 * inverse of exynos_dp_detect_hpd() (whose 0 return means plugged).
 */
1057 static bool exynos_dp_is_connected(void *ctx)
1059 struct exynos_dp_device *dp = ctx;
1061 if (dp->force_connected)
1064 return !exynos_dp_detect_hpd(dp);
/*
 * exynos_dp_subdrv_probe() - attach to the DRM device and power on.
 *
 * Stores the drm_device pointer and switches the panel to DPMS_ON.
 */
1067 static int exynos_dp_subdrv_probe(void *ctx, struct drm_device *drm_dev)
1069 struct exynos_dp_device *dp = ctx;
1071 dp->drm_dev = drm_dev;
1073 exynos_dp_dpms(dp, DRM_MODE_DPMS_ON);
/* Panel operations exposed to the Exynos DRM display layer. */
1078 static struct exynos_panel_ops dp_panel_ops = {
1079 .subdrv_probe = exynos_dp_subdrv_probe,
1080 .is_connected = exynos_dp_is_connected,
1081 .check_timing = exynos_dp_check_timing,
1082 .dpms = exynos_dp_dpms,
/*
 * exynos_dp_probe() - platform driver probe.
 *
 * Allocates the device context, acquires and enables the "dp" clock, maps
 * the register region, sets up HPD: either a platform HPD GPIO (requested
 * as input, IRQ derived via gpio_to_irq() with both-edge triggers) or the
 * platform IRQ resource.  Copies platform data (training type, video
 * info, force_connected), initializes the PHY if provided, wires the
 * hotplug worker and IRQ handler, and registers the panel ops with the
 * Exynos display layer.  Error paths unwind GPIO, mapping and the memory
 * region in reverse order.
 */
1085 static int __devinit exynos_dp_probe(struct platform_device *pdev)
1087 struct resource *res;
1088 struct exynos_dp_device *dp;
1089 struct exynos_dp_platdata *pdata;
1094 pdata = pdev->dev.platform_data;
1096 dev_err(&pdev->dev, "no platform data\n");
1100 dp = kzalloc(sizeof(struct exynos_dp_device), GFP_KERNEL);
1102 dev_err(&pdev->dev, "no memory for device data\n");
1106 dp->dev = &pdev->dev;
1108 dp->clock = clk_get(&pdev->dev, "dp");
1109 if (IS_ERR(dp->clock)) {
1110 dev_err(&pdev->dev, "failed to get clock\n");
1111 ret = PTR_ERR(dp->clock);
1115 clk_enable(dp->clock);
1117 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1119 dev_err(&pdev->dev, "failed to get registers\n");
1124 res = request_mem_region(res->start, resource_size(res),
1125 dev_name(&pdev->dev));
1127 dev_err(&pdev->dev, "failed to request registers region\n");
1134 dp->reg_base = ioremap(res->start, resource_size(res));
1135 if (!dp->reg_base) {
1136 dev_err(&pdev->dev, "failed to ioremap\n");
1138 goto err_req_region;
/* HPD source: dedicated GPIO if the board provides one, else the IRQ resource. */
1141 if (gpio_is_valid(pdata->hpd_gpio)) {
1142 dp->hpd_gpio = pdata->hpd_gpio;
1143 ret = gpio_request_one(dp->hpd_gpio, GPIOF_IN, "dp_hpd");
1146 dp->irq = gpio_to_irq(dp->hpd_gpio);
1147 irqflags = IRQF_TRIGGER_RISING | IRQF_TRIGGER_FALLING;
1149 dp->hpd_gpio = -ENODEV;
1150 dp->irq = platform_get_irq(pdev, 0);
1154 dp->training_type = pdata->training_type;
1155 dp->video_info = pdata->video_info;
1156 dp->force_connected = pdata->force_connected;
1157 if (pdata->phy_init) {
1158 dp->phy_ops.phy_init = pdata->phy_init;
1159 dp->phy_ops.phy_init();
1161 if (pdata->phy_exit)
1162 dp->phy_ops.phy_exit = pdata->phy_exit;
1164 INIT_WORK(&dp->hotplug_work, exynos_dp_hotplug);
1167 ret = request_irq(dp->irq, exynos_dp_irq_handler, irqflags,
1170 dev_err(&pdev->dev, "failed to request irq\n");
1175 platform_set_drvdata(pdev, dp);
1177 exynos_display_attach_panel(EXYNOS_DRM_DISPLAY_TYPE_FIMD, &dp_panel_ops,
/* Error unwind: release in reverse order of acquisition. */
1183 if (gpio_is_valid(dp->hpd_gpio))
1184 gpio_free(dp->hpd_gpio);
1186 iounmap(dp->reg_base);
1188 release_mem_region(res->start, resource_size(res));
/*
 * exynos_dp_remove() - platform driver remove.
 *
 * Powers the block off (which also flushes the hotplug worker), then
 * releases the HPD GPIO, IRQ, register mapping, clock and memory region.
 */
1197 static int __devexit exynos_dp_remove(struct platform_device *pdev)
1199 struct exynos_dp_device *dp = platform_get_drvdata(pdev);
1201 /* power_off will take care of flushing the hotplug_work */
1202 exynos_dp_dpms(dp, DRM_MODE_DPMS_OFF);
1204 if (gpio_is_valid(dp->hpd_gpio))
1205 gpio_free(dp->hpd_gpio);
1207 free_irq(dp->irq, dp);
1208 iounmap(dp->reg_base);
1210 clk_disable(dp->clock);
1213 release_mem_region(dp->res->start, resource_size(dp->res));
/* Platform driver registration for the Exynos DP controller. */
1220 struct platform_driver dp_driver = {
1221 .probe = exynos_dp_probe,
1222 .remove = __devexit_p(exynos_dp_remove),
1225 .owner = THIS_MODULE,