Diffstat (limited to 'drivers/gpu/drm/i915/intel_dp.c')
-rw-r--r--	drivers/gpu/drm/i915/intel_dp.c	959
1 file changed, 586 insertions(+), 373 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_dp.c b/drivers/gpu/drm/i915/intel_dp.c
index 368ed8ef1600..a9ba88a9b1ab 100644
--- a/drivers/gpu/drm/i915/intel_dp.c
+++ b/drivers/gpu/drm/i915/intel_dp.c
@@ -36,8 +36,6 @@ | |||
36 | #include <drm/i915_drm.h> | 36 | #include <drm/i915_drm.h> |
37 | #include "i915_drv.h" | 37 | #include "i915_drv.h" |
38 | 38 | ||
39 | #define DP_RECEIVER_CAP_SIZE 0xf | ||
40 | #define DP_LINK_STATUS_SIZE 6 | ||
41 | #define DP_LINK_CHECK_TIMEOUT (10 * 1000) | 39 | #define DP_LINK_CHECK_TIMEOUT (10 * 1000) |
42 | 40 | ||
43 | /** | 41 | /** |
@@ -49,7 +47,9 @@ | |||
49 | */ | 47 | */ |
50 | static bool is_edp(struct intel_dp *intel_dp) | 48 | static bool is_edp(struct intel_dp *intel_dp) |
51 | { | 49 | { |
52 | return intel_dp->base.type == INTEL_OUTPUT_EDP; | 50 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
51 | |||
52 | return intel_dig_port->base.type == INTEL_OUTPUT_EDP; | ||
53 | } | 53 | } |
54 | 54 | ||
55 | /** | 55 | /** |
@@ -76,15 +76,16 @@ static bool is_cpu_edp(struct intel_dp *intel_dp) | |||
76 | return is_edp(intel_dp) && !is_pch_edp(intel_dp); | 76 | return is_edp(intel_dp) && !is_pch_edp(intel_dp); |
77 | } | 77 | } |
78 | 78 | ||
79 | static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder) | 79 | static struct drm_device *intel_dp_to_dev(struct intel_dp *intel_dp) |
80 | { | 80 | { |
81 | return container_of(encoder, struct intel_dp, base.base); | 81 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
82 | |||
83 | return intel_dig_port->base.base.dev; | ||
82 | } | 84 | } |
83 | 85 | ||
84 | static struct intel_dp *intel_attached_dp(struct drm_connector *connector) | 86 | static struct intel_dp *intel_attached_dp(struct drm_connector *connector) |
85 | { | 87 | { |
86 | return container_of(intel_attached_encoder(connector), | 88 | return enc_to_intel_dp(&intel_attached_encoder(connector)->base); |
87 | struct intel_dp, base); | ||
88 | } | 89 | } |
89 | 90 | ||
90 | /** | 91 | /** |
@@ -106,49 +107,32 @@ bool intel_encoder_is_pch_edp(struct drm_encoder *encoder) | |||
106 | return is_pch_edp(intel_dp); | 107 | return is_pch_edp(intel_dp); |
107 | } | 108 | } |
108 | 109 | ||
109 | static void intel_dp_start_link_train(struct intel_dp *intel_dp); | ||
110 | static void intel_dp_complete_link_train(struct intel_dp *intel_dp); | ||
111 | static void intel_dp_link_down(struct intel_dp *intel_dp); | 110 | static void intel_dp_link_down(struct intel_dp *intel_dp); |
112 | 111 | ||
113 | void | 112 | void |
114 | intel_edp_link_config(struct intel_encoder *intel_encoder, | 113 | intel_edp_link_config(struct intel_encoder *intel_encoder, |
115 | int *lane_num, int *link_bw) | 114 | int *lane_num, int *link_bw) |
116 | { | 115 | { |
117 | struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base); | 116 | struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base); |
118 | 117 | ||
119 | *lane_num = intel_dp->lane_count; | 118 | *lane_num = intel_dp->lane_count; |
120 | if (intel_dp->link_bw == DP_LINK_BW_1_62) | 119 | *link_bw = drm_dp_bw_code_to_link_rate(intel_dp->link_bw); |
121 | *link_bw = 162000; | ||
122 | else if (intel_dp->link_bw == DP_LINK_BW_2_7) | ||
123 | *link_bw = 270000; | ||
124 | } | 120 | } |
125 | 121 | ||
126 | int | 122 | int |
127 | intel_edp_target_clock(struct intel_encoder *intel_encoder, | 123 | intel_edp_target_clock(struct intel_encoder *intel_encoder, |
128 | struct drm_display_mode *mode) | 124 | struct drm_display_mode *mode) |
129 | { | 125 | { |
130 | struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base); | 126 | struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base); |
127 | struct intel_connector *intel_connector = intel_dp->attached_connector; | ||
131 | 128 | ||
132 | if (intel_dp->panel_fixed_mode) | 129 | if (intel_connector->panel.fixed_mode) |
133 | return intel_dp->panel_fixed_mode->clock; | 130 | return intel_connector->panel.fixed_mode->clock; |
134 | else | 131 | else |
135 | return mode->clock; | 132 | return mode->clock; |
136 | } | 133 | } |
137 | 134 | ||
138 | static int | 135 | static int |
139 | intel_dp_max_lane_count(struct intel_dp *intel_dp) | ||
140 | { | ||
141 | int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f; | ||
142 | switch (max_lane_count) { | ||
143 | case 1: case 2: case 4: | ||
144 | break; | ||
145 | default: | ||
146 | max_lane_count = 4; | ||
147 | } | ||
148 | return max_lane_count; | ||
149 | } | ||
150 | |||
151 | static int | ||
152 | intel_dp_max_link_bw(struct intel_dp *intel_dp) | 136 | intel_dp_max_link_bw(struct intel_dp *intel_dp) |
153 | { | 137 | { |
154 | int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE]; | 138 | int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE]; |
@@ -208,7 +192,7 @@ intel_dp_adjust_dithering(struct intel_dp *intel_dp, | |||
208 | bool adjust_mode) | 192 | bool adjust_mode) |
209 | { | 193 | { |
210 | int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp)); | 194 | int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp)); |
211 | int max_lanes = intel_dp_max_lane_count(intel_dp); | 195 | int max_lanes = drm_dp_max_lane_count(intel_dp->dpcd); |
212 | int max_rate, mode_rate; | 196 | int max_rate, mode_rate; |
213 | 197 | ||
214 | mode_rate = intel_dp_link_required(mode->clock, 24); | 198 | mode_rate = intel_dp_link_required(mode->clock, 24); |
@@ -234,12 +218,14 @@ intel_dp_mode_valid(struct drm_connector *connector, | |||
234 | struct drm_display_mode *mode) | 218 | struct drm_display_mode *mode) |
235 | { | 219 | { |
236 | struct intel_dp *intel_dp = intel_attached_dp(connector); | 220 | struct intel_dp *intel_dp = intel_attached_dp(connector); |
221 | struct intel_connector *intel_connector = to_intel_connector(connector); | ||
222 | struct drm_display_mode *fixed_mode = intel_connector->panel.fixed_mode; | ||
237 | 223 | ||
238 | if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { | 224 | if (is_edp(intel_dp) && fixed_mode) { |
239 | if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay) | 225 | if (mode->hdisplay > fixed_mode->hdisplay) |
240 | return MODE_PANEL; | 226 | return MODE_PANEL; |
241 | 227 | ||
242 | if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay) | 228 | if (mode->vdisplay > fixed_mode->vdisplay) |
243 | return MODE_PANEL; | 229 | return MODE_PANEL; |
244 | } | 230 | } |
245 | 231 | ||
@@ -285,6 +271,10 @@ intel_hrawclk(struct drm_device *dev) | |||
285 | struct drm_i915_private *dev_priv = dev->dev_private; | 271 | struct drm_i915_private *dev_priv = dev->dev_private; |
286 | uint32_t clkcfg; | 272 | uint32_t clkcfg; |
287 | 273 | ||
274 | /* There is no CLKCFG reg in Valleyview. VLV hrawclk is 200 MHz */ | ||
275 | if (IS_VALLEYVIEW(dev)) | ||
276 | return 200; | ||
277 | |||
288 | clkcfg = I915_READ(CLKCFG); | 278 | clkcfg = I915_READ(CLKCFG); |
289 | switch (clkcfg & CLKCFG_FSB_MASK) { | 279 | switch (clkcfg & CLKCFG_FSB_MASK) { |
290 | case CLKCFG_FSB_400: | 280 | case CLKCFG_FSB_400: |
@@ -310,7 +300,7 @@ intel_hrawclk(struct drm_device *dev) | |||
310 | 300 | ||
311 | static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp) | 301 | static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp) |
312 | { | 302 | { |
313 | struct drm_device *dev = intel_dp->base.base.dev; | 303 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
314 | struct drm_i915_private *dev_priv = dev->dev_private; | 304 | struct drm_i915_private *dev_priv = dev->dev_private; |
315 | 305 | ||
316 | return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0; | 306 | return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0; |
@@ -318,7 +308,7 @@ static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp) | |||
318 | 308 | ||
319 | static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp) | 309 | static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp) |
320 | { | 310 | { |
321 | struct drm_device *dev = intel_dp->base.base.dev; | 311 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
322 | struct drm_i915_private *dev_priv = dev->dev_private; | 312 | struct drm_i915_private *dev_priv = dev->dev_private; |
323 | 313 | ||
324 | return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0; | 314 | return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0; |
@@ -327,7 +317,7 @@ static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp) | |||
327 | static void | 317 | static void |
328 | intel_dp_check_edp(struct intel_dp *intel_dp) | 318 | intel_dp_check_edp(struct intel_dp *intel_dp) |
329 | { | 319 | { |
330 | struct drm_device *dev = intel_dp->base.base.dev; | 320 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
331 | struct drm_i915_private *dev_priv = dev->dev_private; | 321 | struct drm_i915_private *dev_priv = dev->dev_private; |
332 | 322 | ||
333 | if (!is_edp(intel_dp)) | 323 | if (!is_edp(intel_dp)) |
@@ -346,7 +336,8 @@ intel_dp_aux_ch(struct intel_dp *intel_dp, | |||
346 | uint8_t *recv, int recv_size) | 336 | uint8_t *recv, int recv_size) |
347 | { | 337 | { |
348 | uint32_t output_reg = intel_dp->output_reg; | 338 | uint32_t output_reg = intel_dp->output_reg; |
349 | struct drm_device *dev = intel_dp->base.base.dev; | 339 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
340 | struct drm_device *dev = intel_dig_port->base.base.dev; | ||
350 | struct drm_i915_private *dev_priv = dev->dev_private; | 341 | struct drm_i915_private *dev_priv = dev->dev_private; |
351 | uint32_t ch_ctl = output_reg + 0x10; | 342 | uint32_t ch_ctl = output_reg + 0x10; |
352 | uint32_t ch_data = ch_ctl + 4; | 343 | uint32_t ch_data = ch_ctl + 4; |
@@ -356,6 +347,29 @@ intel_dp_aux_ch(struct intel_dp *intel_dp, | |||
356 | uint32_t aux_clock_divider; | 347 | uint32_t aux_clock_divider; |
357 | int try, precharge; | 348 | int try, precharge; |
358 | 349 | ||
350 | if (IS_HASWELL(dev)) { | ||
351 | switch (intel_dig_port->port) { | ||
352 | case PORT_A: | ||
353 | ch_ctl = DPA_AUX_CH_CTL; | ||
354 | ch_data = DPA_AUX_CH_DATA1; | ||
355 | break; | ||
356 | case PORT_B: | ||
357 | ch_ctl = PCH_DPB_AUX_CH_CTL; | ||
358 | ch_data = PCH_DPB_AUX_CH_DATA1; | ||
359 | break; | ||
360 | case PORT_C: | ||
361 | ch_ctl = PCH_DPC_AUX_CH_CTL; | ||
362 | ch_data = PCH_DPC_AUX_CH_DATA1; | ||
363 | break; | ||
364 | case PORT_D: | ||
365 | ch_ctl = PCH_DPD_AUX_CH_CTL; | ||
366 | ch_data = PCH_DPD_AUX_CH_DATA1; | ||
367 | break; | ||
368 | default: | ||
369 | BUG(); | ||
370 | } | ||
371 | } | ||
372 | |||
359 | intel_dp_check_edp(intel_dp); | 373 | intel_dp_check_edp(intel_dp); |
360 | /* The clock divider is based off the hrawclk, | 374 | /* The clock divider is based off the hrawclk, |
361 | * and would like to run at 2MHz. So, take the | 375 | * and would like to run at 2MHz. So, take the |
@@ -365,12 +379,16 @@ intel_dp_aux_ch(struct intel_dp *intel_dp, | |||
365 | * clock divider. | 379 | * clock divider. |
366 | */ | 380 | */ |
367 | if (is_cpu_edp(intel_dp)) { | 381 | if (is_cpu_edp(intel_dp)) { |
368 | if (IS_GEN6(dev) || IS_GEN7(dev)) | 382 | if (IS_HASWELL(dev)) |
383 | aux_clock_divider = intel_ddi_get_cdclk_freq(dev_priv) >> 1; | ||
384 | else if (IS_VALLEYVIEW(dev)) | ||
385 | aux_clock_divider = 100; | ||
386 | else if (IS_GEN6(dev) || IS_GEN7(dev)) | ||
369 | aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */ | 387 | aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */ |
370 | else | 388 | else |
371 | aux_clock_divider = 225; /* eDP input clock at 450Mhz */ | 389 | aux_clock_divider = 225; /* eDP input clock at 450Mhz */ |
372 | } else if (HAS_PCH_SPLIT(dev)) | 390 | } else if (HAS_PCH_SPLIT(dev)) |
373 | aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */ | 391 | aux_clock_divider = DIV_ROUND_UP(intel_pch_rawclk(dev), 2); |
374 | else | 392 | else |
375 | aux_clock_divider = intel_hrawclk(dev) / 2; | 393 | aux_clock_divider = intel_hrawclk(dev) / 2; |
376 | 394 | ||
@@ -642,9 +660,6 @@ intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode, | |||
642 | return -EREMOTEIO; | 660 | return -EREMOTEIO; |
643 | } | 661 | } |
644 | 662 | ||
645 | static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp); | ||
646 | static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync); | ||
647 | |||
648 | static int | 663 | static int |
649 | intel_dp_i2c_init(struct intel_dp *intel_dp, | 664 | intel_dp_i2c_init(struct intel_dp *intel_dp, |
650 | struct intel_connector *intel_connector, const char *name) | 665 | struct intel_connector *intel_connector, const char *name) |
@@ -670,22 +685,25 @@ intel_dp_i2c_init(struct intel_dp *intel_dp, | |||
670 | return ret; | 685 | return ret; |
671 | } | 686 | } |
672 | 687 | ||
673 | static bool | 688 | bool |
674 | intel_dp_mode_fixup(struct drm_encoder *encoder, | 689 | intel_dp_mode_fixup(struct drm_encoder *encoder, |
675 | const struct drm_display_mode *mode, | 690 | const struct drm_display_mode *mode, |
676 | struct drm_display_mode *adjusted_mode) | 691 | struct drm_display_mode *adjusted_mode) |
677 | { | 692 | { |
678 | struct drm_device *dev = encoder->dev; | 693 | struct drm_device *dev = encoder->dev; |
679 | struct intel_dp *intel_dp = enc_to_intel_dp(encoder); | 694 | struct intel_dp *intel_dp = enc_to_intel_dp(encoder); |
695 | struct intel_connector *intel_connector = intel_dp->attached_connector; | ||
680 | int lane_count, clock; | 696 | int lane_count, clock; |
681 | int max_lane_count = intel_dp_max_lane_count(intel_dp); | 697 | int max_lane_count = drm_dp_max_lane_count(intel_dp->dpcd); |
682 | int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0; | 698 | int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0; |
683 | int bpp, mode_rate; | 699 | int bpp, mode_rate; |
684 | static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; | 700 | static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; |
685 | 701 | ||
686 | if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { | 702 | if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) { |
687 | intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode); | 703 | intel_fixed_panel_mode(intel_connector->panel.fixed_mode, |
688 | intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN, | 704 | adjusted_mode); |
705 | intel_pch_panel_fitting(dev, | ||
706 | intel_connector->panel.fitting_mode, | ||
689 | mode, adjusted_mode); | 707 | mode, adjusted_mode); |
690 | } | 708 | } |
691 | 709 | ||
@@ -762,21 +780,23 @@ intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, | |||
762 | struct drm_display_mode *adjusted_mode) | 780 | struct drm_display_mode *adjusted_mode) |
763 | { | 781 | { |
764 | struct drm_device *dev = crtc->dev; | 782 | struct drm_device *dev = crtc->dev; |
765 | struct intel_encoder *encoder; | 783 | struct intel_encoder *intel_encoder; |
784 | struct intel_dp *intel_dp; | ||
766 | struct drm_i915_private *dev_priv = dev->dev_private; | 785 | struct drm_i915_private *dev_priv = dev->dev_private; |
767 | struct intel_crtc *intel_crtc = to_intel_crtc(crtc); | 786 | struct intel_crtc *intel_crtc = to_intel_crtc(crtc); |
768 | int lane_count = 4; | 787 | int lane_count = 4; |
769 | struct intel_dp_m_n m_n; | 788 | struct intel_dp_m_n m_n; |
770 | int pipe = intel_crtc->pipe; | 789 | int pipe = intel_crtc->pipe; |
790 | enum transcoder cpu_transcoder = intel_crtc->cpu_transcoder; | ||
771 | 791 | ||
772 | /* | 792 | /* |
773 | * Find the lane count in the intel_encoder private | 793 | * Find the lane count in the intel_encoder private |
774 | */ | 794 | */ |
775 | for_each_encoder_on_crtc(dev, crtc, encoder) { | 795 | for_each_encoder_on_crtc(dev, crtc, intel_encoder) { |
776 | struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base); | 796 | intel_dp = enc_to_intel_dp(&intel_encoder->base); |
777 | 797 | ||
778 | if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT || | 798 | if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT || |
779 | intel_dp->base.type == INTEL_OUTPUT_EDP) | 799 | intel_encoder->type == INTEL_OUTPUT_EDP) |
780 | { | 800 | { |
781 | lane_count = intel_dp->lane_count; | 801 | lane_count = intel_dp->lane_count; |
782 | break; | 802 | break; |
@@ -791,23 +811,46 @@ intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, | |||
791 | intel_dp_compute_m_n(intel_crtc->bpp, lane_count, | 811 | intel_dp_compute_m_n(intel_crtc->bpp, lane_count, |
792 | mode->clock, adjusted_mode->clock, &m_n); | 812 | mode->clock, adjusted_mode->clock, &m_n); |
793 | 813 | ||
794 | if (HAS_PCH_SPLIT(dev)) { | 814 | if (IS_HASWELL(dev)) { |
795 | I915_WRITE(TRANSDATA_M1(pipe), | 815 | I915_WRITE(PIPE_DATA_M1(cpu_transcoder), |
796 | ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | | 816 | TU_SIZE(m_n.tu) | m_n.gmch_m); |
797 | m_n.gmch_m); | 817 | I915_WRITE(PIPE_DATA_N1(cpu_transcoder), m_n.gmch_n); |
818 | I915_WRITE(PIPE_LINK_M1(cpu_transcoder), m_n.link_m); | ||
819 | I915_WRITE(PIPE_LINK_N1(cpu_transcoder), m_n.link_n); | ||
820 | } else if (HAS_PCH_SPLIT(dev)) { | ||
821 | I915_WRITE(TRANSDATA_M1(pipe), TU_SIZE(m_n.tu) | m_n.gmch_m); | ||
798 | I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n); | 822 | I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n); |
799 | I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m); | 823 | I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m); |
800 | I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n); | 824 | I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n); |
825 | } else if (IS_VALLEYVIEW(dev)) { | ||
826 | I915_WRITE(PIPE_DATA_M1(pipe), TU_SIZE(m_n.tu) | m_n.gmch_m); | ||
827 | I915_WRITE(PIPE_DATA_N1(pipe), m_n.gmch_n); | ||
828 | I915_WRITE(PIPE_LINK_M1(pipe), m_n.link_m); | ||
829 | I915_WRITE(PIPE_LINK_N1(pipe), m_n.link_n); | ||
801 | } else { | 830 | } else { |
802 | I915_WRITE(PIPE_GMCH_DATA_M(pipe), | 831 | I915_WRITE(PIPE_GMCH_DATA_M(pipe), |
803 | ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | | 832 | TU_SIZE(m_n.tu) | m_n.gmch_m); |
804 | m_n.gmch_m); | ||
805 | I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); | 833 | I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); |
806 | I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); | 834 | I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); |
807 | I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); | 835 | I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); |
808 | } | 836 | } |
809 | } | 837 | } |
810 | 838 | ||
839 | void intel_dp_init_link_config(struct intel_dp *intel_dp) | ||
840 | { | ||
841 | memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); | ||
842 | intel_dp->link_configuration[0] = intel_dp->link_bw; | ||
843 | intel_dp->link_configuration[1] = intel_dp->lane_count; | ||
844 | intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B; | ||
845 | /* | ||
846 | * Check for DPCD version > 1.1 and enhanced framing support | ||
847 | */ | ||
848 | if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && | ||
849 | (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { | ||
850 | intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; | ||
851 | } | ||
852 | } | ||
853 | |||
811 | static void | 854 | static void |
812 | intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | 855 | intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, |
813 | struct drm_display_mode *adjusted_mode) | 856 | struct drm_display_mode *adjusted_mode) |
@@ -815,7 +858,7 @@ intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | |||
815 | struct drm_device *dev = encoder->dev; | 858 | struct drm_device *dev = encoder->dev; |
816 | struct drm_i915_private *dev_priv = dev->dev_private; | 859 | struct drm_i915_private *dev_priv = dev->dev_private; |
817 | struct intel_dp *intel_dp = enc_to_intel_dp(encoder); | 860 | struct intel_dp *intel_dp = enc_to_intel_dp(encoder); |
818 | struct drm_crtc *crtc = intel_dp->base.base.crtc; | 861 | struct drm_crtc *crtc = encoder->crtc; |
819 | struct intel_crtc *intel_crtc = to_intel_crtc(crtc); | 862 | struct intel_crtc *intel_crtc = to_intel_crtc(crtc); |
820 | 863 | ||
821 | /* | 864 | /* |
@@ -860,21 +903,12 @@ intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | |||
860 | intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; | 903 | intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; |
861 | intel_write_eld(encoder, adjusted_mode); | 904 | intel_write_eld(encoder, adjusted_mode); |
862 | } | 905 | } |
863 | memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); | 906 | |
864 | intel_dp->link_configuration[0] = intel_dp->link_bw; | 907 | intel_dp_init_link_config(intel_dp); |
865 | intel_dp->link_configuration[1] = intel_dp->lane_count; | ||
866 | intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B; | ||
867 | /* | ||
868 | * Check for DPCD version > 1.1 and enhanced framing support | ||
869 | */ | ||
870 | if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && | ||
871 | (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { | ||
872 | intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; | ||
873 | } | ||
874 | 908 | ||
875 | /* Split out the IBX/CPU vs CPT settings */ | 909 | /* Split out the IBX/CPU vs CPT settings */ |
876 | 910 | ||
877 | if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) { | 911 | if (is_cpu_edp(intel_dp) && IS_GEN7(dev) && !IS_VALLEYVIEW(dev)) { |
878 | if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) | 912 | if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) |
879 | intel_dp->DP |= DP_SYNC_HS_HIGH; | 913 | intel_dp->DP |= DP_SYNC_HS_HIGH; |
880 | if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) | 914 | if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) |
@@ -931,7 +965,7 @@ static void ironlake_wait_panel_status(struct intel_dp *intel_dp, | |||
931 | u32 mask, | 965 | u32 mask, |
932 | u32 value) | 966 | u32 value) |
933 | { | 967 | { |
934 | struct drm_device *dev = intel_dp->base.base.dev; | 968 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
935 | struct drm_i915_private *dev_priv = dev->dev_private; | 969 | struct drm_i915_private *dev_priv = dev->dev_private; |
936 | 970 | ||
937 | DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n", | 971 | DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n", |
@@ -978,9 +1012,9 @@ static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv) | |||
978 | return control; | 1012 | return control; |
979 | } | 1013 | } |
980 | 1014 | ||
981 | static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp) | 1015 | void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp) |
982 | { | 1016 | { |
983 | struct drm_device *dev = intel_dp->base.base.dev; | 1017 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
984 | struct drm_i915_private *dev_priv = dev->dev_private; | 1018 | struct drm_i915_private *dev_priv = dev->dev_private; |
985 | u32 pp; | 1019 | u32 pp; |
986 | 1020 | ||
@@ -1019,7 +1053,7 @@ static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp) | |||
1019 | 1053 | ||
1020 | static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp) | 1054 | static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp) |
1021 | { | 1055 | { |
1022 | struct drm_device *dev = intel_dp->base.base.dev; | 1056 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1023 | struct drm_i915_private *dev_priv = dev->dev_private; | 1057 | struct drm_i915_private *dev_priv = dev->dev_private; |
1024 | u32 pp; | 1058 | u32 pp; |
1025 | 1059 | ||
@@ -1041,14 +1075,14 @@ static void ironlake_panel_vdd_work(struct work_struct *__work) | |||
1041 | { | 1075 | { |
1042 | struct intel_dp *intel_dp = container_of(to_delayed_work(__work), | 1076 | struct intel_dp *intel_dp = container_of(to_delayed_work(__work), |
1043 | struct intel_dp, panel_vdd_work); | 1077 | struct intel_dp, panel_vdd_work); |
1044 | struct drm_device *dev = intel_dp->base.base.dev; | 1078 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1045 | 1079 | ||
1046 | mutex_lock(&dev->mode_config.mutex); | 1080 | mutex_lock(&dev->mode_config.mutex); |
1047 | ironlake_panel_vdd_off_sync(intel_dp); | 1081 | ironlake_panel_vdd_off_sync(intel_dp); |
1048 | mutex_unlock(&dev->mode_config.mutex); | 1082 | mutex_unlock(&dev->mode_config.mutex); |
1049 | } | 1083 | } |
1050 | 1084 | ||
1051 | static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync) | 1085 | void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync) |
1052 | { | 1086 | { |
1053 | if (!is_edp(intel_dp)) | 1087 | if (!is_edp(intel_dp)) |
1054 | return; | 1088 | return; |
@@ -1071,9 +1105,9 @@ static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync) | |||
1071 | } | 1105 | } |
1072 | } | 1106 | } |
1073 | 1107 | ||
1074 | static void ironlake_edp_panel_on(struct intel_dp *intel_dp) | 1108 | void ironlake_edp_panel_on(struct intel_dp *intel_dp) |
1075 | { | 1109 | { |
1076 | struct drm_device *dev = intel_dp->base.base.dev; | 1110 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1077 | struct drm_i915_private *dev_priv = dev->dev_private; | 1111 | struct drm_i915_private *dev_priv = dev->dev_private; |
1078 | u32 pp; | 1112 | u32 pp; |
1079 | 1113 | ||
@@ -1113,9 +1147,9 @@ static void ironlake_edp_panel_on(struct intel_dp *intel_dp) | |||
1113 | } | 1147 | } |
1114 | } | 1148 | } |
1115 | 1149 | ||
1116 | static void ironlake_edp_panel_off(struct intel_dp *intel_dp) | 1150 | void ironlake_edp_panel_off(struct intel_dp *intel_dp) |
1117 | { | 1151 | { |
1118 | struct drm_device *dev = intel_dp->base.base.dev; | 1152 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1119 | struct drm_i915_private *dev_priv = dev->dev_private; | 1153 | struct drm_i915_private *dev_priv = dev->dev_private; |
1120 | u32 pp; | 1154 | u32 pp; |
1121 | 1155 | ||
@@ -1138,10 +1172,12 @@ static void ironlake_edp_panel_off(struct intel_dp *intel_dp) | |||
1138 | ironlake_wait_panel_off(intel_dp); | 1172 | ironlake_wait_panel_off(intel_dp); |
1139 | } | 1173 | } |
1140 | 1174 | ||
1141 | static void ironlake_edp_backlight_on(struct intel_dp *intel_dp) | 1175 | void ironlake_edp_backlight_on(struct intel_dp *intel_dp) |
1142 | { | 1176 | { |
1143 | struct drm_device *dev = intel_dp->base.base.dev; | 1177 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
1178 | struct drm_device *dev = intel_dig_port->base.base.dev; | ||
1144 | struct drm_i915_private *dev_priv = dev->dev_private; | 1179 | struct drm_i915_private *dev_priv = dev->dev_private; |
1180 | int pipe = to_intel_crtc(intel_dig_port->base.base.crtc)->pipe; | ||
1145 | u32 pp; | 1181 | u32 pp; |
1146 | 1182 | ||
1147 | if (!is_edp(intel_dp)) | 1183 | if (!is_edp(intel_dp)) |
@@ -1159,17 +1195,21 @@ static void ironlake_edp_backlight_on(struct intel_dp *intel_dp) | |||
1159 | pp |= EDP_BLC_ENABLE; | 1195 | pp |= EDP_BLC_ENABLE; |
1160 | I915_WRITE(PCH_PP_CONTROL, pp); | 1196 | I915_WRITE(PCH_PP_CONTROL, pp); |
1161 | POSTING_READ(PCH_PP_CONTROL); | 1197 | POSTING_READ(PCH_PP_CONTROL); |
1198 | |||
1199 | intel_panel_enable_backlight(dev, pipe); | ||
1162 | } | 1200 | } |
1163 | 1201 | ||
1164 | static void ironlake_edp_backlight_off(struct intel_dp *intel_dp) | 1202 | void ironlake_edp_backlight_off(struct intel_dp *intel_dp) |
1165 | { | 1203 | { |
1166 | struct drm_device *dev = intel_dp->base.base.dev; | 1204 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1167 | struct drm_i915_private *dev_priv = dev->dev_private; | 1205 | struct drm_i915_private *dev_priv = dev->dev_private; |
1168 | u32 pp; | 1206 | u32 pp; |
1169 | 1207 | ||
1170 | if (!is_edp(intel_dp)) | 1208 | if (!is_edp(intel_dp)) |
1171 | return; | 1209 | return; |
1172 | 1210 | ||
1211 | intel_panel_disable_backlight(dev); | ||
1212 | |||
1173 | DRM_DEBUG_KMS("\n"); | 1213 | DRM_DEBUG_KMS("\n"); |
1174 | pp = ironlake_get_pp_control(dev_priv); | 1214 | pp = ironlake_get_pp_control(dev_priv); |
1175 | pp &= ~EDP_BLC_ENABLE; | 1215 | pp &= ~EDP_BLC_ENABLE; |
@@ -1180,8 +1220,9 @@ static void ironlake_edp_backlight_off(struct intel_dp *intel_dp) | |||
1180 | 1220 | ||
1181 | static void ironlake_edp_pll_on(struct intel_dp *intel_dp) | 1221 | static void ironlake_edp_pll_on(struct intel_dp *intel_dp) |
1182 | { | 1222 | { |
1183 | struct drm_device *dev = intel_dp->base.base.dev; | 1223 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
1184 | struct drm_crtc *crtc = intel_dp->base.base.crtc; | 1224 | struct drm_crtc *crtc = intel_dig_port->base.base.crtc; |
1225 | struct drm_device *dev = crtc->dev; | ||
1185 | struct drm_i915_private *dev_priv = dev->dev_private; | 1226 | struct drm_i915_private *dev_priv = dev->dev_private; |
1186 | u32 dpa_ctl; | 1227 | u32 dpa_ctl; |
1187 | 1228 | ||
@@ -1205,8 +1246,9 @@ static void ironlake_edp_pll_on(struct intel_dp *intel_dp) | |||
1205 | 1246 | ||
1206 | static void ironlake_edp_pll_off(struct intel_dp *intel_dp) | 1247 | static void ironlake_edp_pll_off(struct intel_dp *intel_dp) |
1207 | { | 1248 | { |
1208 | struct drm_device *dev = intel_dp->base.base.dev; | 1249 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
1209 | struct drm_crtc *crtc = intel_dp->base.base.crtc; | 1250 | struct drm_crtc *crtc = intel_dig_port->base.base.crtc; |
1251 | struct drm_device *dev = crtc->dev; | ||
1210 | struct drm_i915_private *dev_priv = dev->dev_private; | 1252 | struct drm_i915_private *dev_priv = dev->dev_private; |
1211 | u32 dpa_ctl; | 1253 | u32 dpa_ctl; |
1212 | 1254 | ||
@@ -1228,7 +1270,7 @@ static void ironlake_edp_pll_off(struct intel_dp *intel_dp) | |||
1228 | } | 1270 | } |
1229 | 1271 | ||
1230 | /* If the sink supports it, try to set the power state appropriately */ | 1272 | /* If the sink supports it, try to set the power state appropriately */ |
1231 | static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode) | 1273 | void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode) |
1232 | { | 1274 | { |
1233 | int ret, i; | 1275 | int ret, i; |
1234 | 1276 | ||
@@ -1298,9 +1340,10 @@ static bool intel_dp_get_hw_state(struct intel_encoder *encoder, | |||
1298 | return true; | 1340 | return true; |
1299 | } | 1341 | } |
1300 | } | 1342 | } |
1301 | } | ||
1302 | 1343 | ||
1303 | DRM_DEBUG_KMS("No pipe for dp port 0x%x found\n", intel_dp->output_reg); | 1344 | DRM_DEBUG_KMS("No pipe for dp port 0x%x found\n", |
1345 | intel_dp->output_reg); | ||
1346 | } | ||
1304 | 1347 | ||
1305 | return true; | 1348 | return true; |
1306 | } | 1349 | } |
@@ -1396,38 +1439,6 @@ intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_ | |||
1396 | DP_LINK_STATUS_SIZE); | 1439 | DP_LINK_STATUS_SIZE); |
1397 | } | 1440 | } |
1398 | 1441 | ||
1399 | static uint8_t | ||
1400 | intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE], | ||
1401 | int r) | ||
1402 | { | ||
1403 | return link_status[r - DP_LANE0_1_STATUS]; | ||
1404 | } | ||
1405 | |||
1406 | static uint8_t | ||
1407 | intel_get_adjust_request_voltage(uint8_t adjust_request[2], | ||
1408 | int lane) | ||
1409 | { | ||
1410 | int s = ((lane & 1) ? | ||
1411 | DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : | ||
1412 | DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT); | ||
1413 | uint8_t l = adjust_request[lane>>1]; | ||
1414 | |||
1415 | return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT; | ||
1416 | } | ||
1417 | |||
1418 | static uint8_t | ||
1419 | intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2], | ||
1420 | int lane) | ||
1421 | { | ||
1422 | int s = ((lane & 1) ? | ||
1423 | DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT : | ||
1424 | DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT); | ||
1425 | uint8_t l = adjust_request[lane>>1]; | ||
1426 | |||
1427 | return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT; | ||
1428 | } | ||
1429 | |||
1430 | |||
1431 | #if 0 | 1442 | #if 0 |
1432 | static char *voltage_names[] = { | 1443 | static char *voltage_names[] = { |
1433 | "0.4V", "0.6V", "0.8V", "1.2V" | 1444 | "0.4V", "0.6V", "0.8V", "1.2V" |
@@ -1448,7 +1459,7 @@ static char *link_train_names[] = { | |||
1448 | static uint8_t | 1459 | static uint8_t |
1449 | intel_dp_voltage_max(struct intel_dp *intel_dp) | 1460 | intel_dp_voltage_max(struct intel_dp *intel_dp) |
1450 | { | 1461 | { |
1451 | struct drm_device *dev = intel_dp->base.base.dev; | 1462 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1452 | 1463 | ||
1453 | if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) | 1464 | if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) |
1454 | return DP_TRAIN_VOLTAGE_SWING_800; | 1465 | return DP_TRAIN_VOLTAGE_SWING_800; |
@@ -1461,9 +1472,21 @@ intel_dp_voltage_max(struct intel_dp *intel_dp) | |||
1461 | static uint8_t | 1472 | static uint8_t |
1462 | intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing) | 1473 | intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing) |
1463 | { | 1474 | { |
1464 | struct drm_device *dev = intel_dp->base.base.dev; | 1475 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1465 | 1476 | ||
1466 | if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { | 1477 | if (IS_HASWELL(dev)) { |
1478 | switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { | ||
1479 | case DP_TRAIN_VOLTAGE_SWING_400: | ||
1480 | return DP_TRAIN_PRE_EMPHASIS_9_5; | ||
1481 | case DP_TRAIN_VOLTAGE_SWING_600: | ||
1482 | return DP_TRAIN_PRE_EMPHASIS_6; | ||
1483 | case DP_TRAIN_VOLTAGE_SWING_800: | ||
1484 | return DP_TRAIN_PRE_EMPHASIS_3_5; | ||
1485 | case DP_TRAIN_VOLTAGE_SWING_1200: | ||
1486 | default: | ||
1487 | return DP_TRAIN_PRE_EMPHASIS_0; | ||
1488 | } | ||
1489 | } else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) { | ||
1467 | switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { | 1490 | switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { |
1468 | case DP_TRAIN_VOLTAGE_SWING_400: | 1491 | case DP_TRAIN_VOLTAGE_SWING_400: |
1469 | return DP_TRAIN_PRE_EMPHASIS_6; | 1492 | return DP_TRAIN_PRE_EMPHASIS_6; |
@@ -1494,13 +1517,12 @@ intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_ST | |||
1494 | uint8_t v = 0; | 1517 | uint8_t v = 0; |
1495 | uint8_t p = 0; | 1518 | uint8_t p = 0; |
1496 | int lane; | 1519 | int lane; |
1497 | uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS); | ||
1498 | uint8_t voltage_max; | 1520 | uint8_t voltage_max; |
1499 | uint8_t preemph_max; | 1521 | uint8_t preemph_max; |
1500 | 1522 | ||
1501 | for (lane = 0; lane < intel_dp->lane_count; lane++) { | 1523 | for (lane = 0; lane < intel_dp->lane_count; lane++) { |
1502 | uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane); | 1524 | uint8_t this_v = drm_dp_get_adjust_request_voltage(link_status, lane); |
1503 | uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane); | 1525 | uint8_t this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane); |
1504 | 1526 | ||
1505 | if (this_v > v) | 1527 | if (this_v > v) |
1506 | v = this_v; | 1528 | v = this_v; |
@@ -1617,52 +1639,38 @@ intel_gen7_edp_signal_levels(uint8_t train_set) | |||
1617 | } | 1639 | } |
1618 | } | 1640 | } |
1619 | 1641 | ||
1620 | static uint8_t | 1642 | /* Gen7.5's (HSW) DP voltage swing and pre-emphasis control */ |
1621 | intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE], | 1643 | static uint32_t |
1622 | int lane) | 1644 | intel_dp_signal_levels_hsw(uint8_t train_set) |
1623 | { | ||
1624 | int s = (lane & 1) * 4; | ||
1625 | uint8_t l = link_status[lane>>1]; | ||
1626 | |||
1627 | return (l >> s) & 0xf; | ||
1628 | } | ||
1629 | |||
1630 | /* Check for clock recovery is done on all channels */ | ||
1631 | static bool | ||
1632 | intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count) | ||
1633 | { | 1645 | { |
1634 | int lane; | 1646 | int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | |
1635 | uint8_t lane_status; | 1647 | DP_TRAIN_PRE_EMPHASIS_MASK); |
1636 | 1648 | switch (signal_levels) { | |
1637 | for (lane = 0; lane < lane_count; lane++) { | 1649 | case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: |
1638 | lane_status = intel_get_lane_status(link_status, lane); | 1650 | return DDI_BUF_EMP_400MV_0DB_HSW; |
1639 | if ((lane_status & DP_LANE_CR_DONE) == 0) | 1651 | case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: |
1640 | return false; | 1652 | return DDI_BUF_EMP_400MV_3_5DB_HSW; |
1641 | } | 1653 | case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: |
1642 | return true; | 1654 | return DDI_BUF_EMP_400MV_6DB_HSW; |
1643 | } | 1655 | case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_9_5: |
1656 | return DDI_BUF_EMP_400MV_9_5DB_HSW; | ||
1644 | 1657 | ||
1645 | /* Check to see if channel eq is done on all channels */ | 1658 | case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0: |
1646 | #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\ | 1659 | return DDI_BUF_EMP_600MV_0DB_HSW; |
1647 | DP_LANE_CHANNEL_EQ_DONE|\ | 1660 | case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: |
1648 | DP_LANE_SYMBOL_LOCKED) | 1661 | return DDI_BUF_EMP_600MV_3_5DB_HSW; |
1649 | static bool | 1662 | case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6: |
1650 | intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) | 1663 | return DDI_BUF_EMP_600MV_6DB_HSW; |
1651 | { | ||
1652 | uint8_t lane_align; | ||
1653 | uint8_t lane_status; | ||
1654 | int lane; | ||
1655 | 1664 | ||
1656 | lane_align = intel_dp_link_status(link_status, | 1665 | case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: |
1657 | DP_LANE_ALIGN_STATUS_UPDATED); | 1666 | return DDI_BUF_EMP_800MV_0DB_HSW; |
1658 | if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0) | 1667 | case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: |
1659 | return false; | 1668 | return DDI_BUF_EMP_800MV_3_5DB_HSW; |
1660 | for (lane = 0; lane < intel_dp->lane_count; lane++) { | 1669 | default: |
1661 | lane_status = intel_get_lane_status(link_status, lane); | 1670 | DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" |
1662 | if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS) | 1671 | "0x%x\n", signal_levels); |
1663 | return false; | 1672 | return DDI_BUF_EMP_400MV_0DB_HSW; |
1664 | } | 1673 | } |
1665 | return true; | ||
1666 | } | 1674 | } |
1667 | 1675 | ||
1668 | static bool | 1676 | static bool |
@@ -1670,11 +1678,49 @@ intel_dp_set_link_train(struct intel_dp *intel_dp, | |||
1670 | uint32_t dp_reg_value, | 1678 | uint32_t dp_reg_value, |
1671 | uint8_t dp_train_pat) | 1679 | uint8_t dp_train_pat) |
1672 | { | 1680 | { |
1673 | struct drm_device *dev = intel_dp->base.base.dev; | 1681 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
1682 | struct drm_device *dev = intel_dig_port->base.base.dev; | ||
1674 | struct drm_i915_private *dev_priv = dev->dev_private; | 1683 | struct drm_i915_private *dev_priv = dev->dev_private; |
1684 | enum port port = intel_dig_port->port; | ||
1675 | int ret; | 1685 | int ret; |
1686 | uint32_t temp; | ||
1676 | 1687 | ||
1677 | if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) { | 1688 | if (IS_HASWELL(dev)) { |
1689 | temp = I915_READ(DP_TP_CTL(port)); | ||
1690 | |||
1691 | if (dp_train_pat & DP_LINK_SCRAMBLING_DISABLE) | ||
1692 | temp |= DP_TP_CTL_SCRAMBLE_DISABLE; | ||
1693 | else | ||
1694 | temp &= ~DP_TP_CTL_SCRAMBLE_DISABLE; | ||
1695 | |||
1696 | temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; | ||
1697 | switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) { | ||
1698 | case DP_TRAINING_PATTERN_DISABLE: | ||
1699 | temp |= DP_TP_CTL_LINK_TRAIN_IDLE; | ||
1700 | I915_WRITE(DP_TP_CTL(port), temp); | ||
1701 | |||
1702 | if (wait_for((I915_READ(DP_TP_STATUS(port)) & | ||
1703 | DP_TP_STATUS_IDLE_DONE), 1)) | ||
1704 | DRM_ERROR("Timed out waiting for DP idle patterns\n"); | ||
1705 | |||
1706 | temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; | ||
1707 | temp |= DP_TP_CTL_LINK_TRAIN_NORMAL; | ||
1708 | |||
1709 | break; | ||
1710 | case DP_TRAINING_PATTERN_1: | ||
1711 | temp |= DP_TP_CTL_LINK_TRAIN_PAT1; | ||
1712 | break; | ||
1713 | case DP_TRAINING_PATTERN_2: | ||
1714 | temp |= DP_TP_CTL_LINK_TRAIN_PAT2; | ||
1715 | break; | ||
1716 | case DP_TRAINING_PATTERN_3: | ||
1717 | temp |= DP_TP_CTL_LINK_TRAIN_PAT3; | ||
1718 | break; | ||
1719 | } | ||
1720 | I915_WRITE(DP_TP_CTL(port), temp); | ||
1721 | |||
1722 | } else if (HAS_PCH_CPT(dev) && | ||
1723 | (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) { | ||
1678 | dp_reg_value &= ~DP_LINK_TRAIN_MASK_CPT; | 1724 | dp_reg_value &= ~DP_LINK_TRAIN_MASK_CPT; |
1679 | 1725 | ||
1680 | switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) { | 1726 | switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) { |
@@ -1734,16 +1780,20 @@ intel_dp_set_link_train(struct intel_dp *intel_dp, | |||
1734 | } | 1780 | } |
1735 | 1781 | ||
1736 | /* Enable corresponding port and start training pattern 1 */ | 1782 | /* Enable corresponding port and start training pattern 1 */ |
1737 | static void | 1783 | void |
1738 | intel_dp_start_link_train(struct intel_dp *intel_dp) | 1784 | intel_dp_start_link_train(struct intel_dp *intel_dp) |
1739 | { | 1785 | { |
1740 | struct drm_device *dev = intel_dp->base.base.dev; | 1786 | struct drm_encoder *encoder = &dp_to_dig_port(intel_dp)->base.base; |
1787 | struct drm_device *dev = encoder->dev; | ||
1741 | int i; | 1788 | int i; |
1742 | uint8_t voltage; | 1789 | uint8_t voltage; |
1743 | bool clock_recovery = false; | 1790 | bool clock_recovery = false; |
1744 | int voltage_tries, loop_tries; | 1791 | int voltage_tries, loop_tries; |
1745 | uint32_t DP = intel_dp->DP; | 1792 | uint32_t DP = intel_dp->DP; |
1746 | 1793 | ||
1794 | if (IS_HASWELL(dev)) | ||
1795 | intel_ddi_prepare_link_retrain(encoder); | ||
1796 | |||
1747 | /* Write the link configuration data */ | 1797 | /* Write the link configuration data */ |
1748 | intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET, | 1798 | intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET, |
1749 | intel_dp->link_configuration, | 1799 | intel_dp->link_configuration, |
@@ -1761,8 +1811,11 @@ intel_dp_start_link_train(struct intel_dp *intel_dp) | |||
1761 | uint8_t link_status[DP_LINK_STATUS_SIZE]; | 1811 | uint8_t link_status[DP_LINK_STATUS_SIZE]; |
1762 | uint32_t signal_levels; | 1812 | uint32_t signal_levels; |
1763 | 1813 | ||
1764 | 1814 | if (IS_HASWELL(dev)) { | |
1765 | if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { | 1815 | signal_levels = intel_dp_signal_levels_hsw( |
1816 | intel_dp->train_set[0]); | ||
1817 | DP = (DP & ~DDI_BUF_EMP_MASK) | signal_levels; | ||
1818 | } else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) { | ||
1766 | signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); | 1819 | signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); |
1767 | DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; | 1820 | DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; |
1768 | } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { | 1821 | } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { |
@@ -1770,23 +1823,24 @@ intel_dp_start_link_train(struct intel_dp *intel_dp) | |||
1770 | DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; | 1823 | DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; |
1771 | } else { | 1824 | } else { |
1772 | signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]); | 1825 | signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]); |
1773 | DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels); | ||
1774 | DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; | 1826 | DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; |
1775 | } | 1827 | } |
1828 | DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", | ||
1829 | signal_levels); | ||
1776 | 1830 | ||
1831 | /* Set training pattern 1 */ | ||
1777 | if (!intel_dp_set_link_train(intel_dp, DP, | 1832 | if (!intel_dp_set_link_train(intel_dp, DP, |
1778 | DP_TRAINING_PATTERN_1 | | 1833 | DP_TRAINING_PATTERN_1 | |
1779 | DP_LINK_SCRAMBLING_DISABLE)) | 1834 | DP_LINK_SCRAMBLING_DISABLE)) |
1780 | break; | 1835 | break; |
1781 | /* Set training pattern 1 */ | ||
1782 | 1836 | ||
1783 | udelay(100); | 1837 | drm_dp_link_train_clock_recovery_delay(intel_dp->dpcd); |
1784 | if (!intel_dp_get_link_status(intel_dp, link_status)) { | 1838 | if (!intel_dp_get_link_status(intel_dp, link_status)) { |
1785 | DRM_ERROR("failed to get link status\n"); | 1839 | DRM_ERROR("failed to get link status\n"); |
1786 | break; | 1840 | break; |
1787 | } | 1841 | } |
1788 | 1842 | ||
1789 | if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) { | 1843 | if (drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) { |
1790 | DRM_DEBUG_KMS("clock recovery OK\n"); | 1844 | DRM_DEBUG_KMS("clock recovery OK\n"); |
1791 | clock_recovery = true; | 1845 | clock_recovery = true; |
1792 | break; | 1846 | break; |
@@ -1825,10 +1879,10 @@ intel_dp_start_link_train(struct intel_dp *intel_dp) | |||
1825 | intel_dp->DP = DP; | 1879 | intel_dp->DP = DP; |
1826 | } | 1880 | } |
1827 | 1881 | ||
1828 | static void | 1882 | void |
1829 | intel_dp_complete_link_train(struct intel_dp *intel_dp) | 1883 | intel_dp_complete_link_train(struct intel_dp *intel_dp) |
1830 | { | 1884 | { |
1831 | struct drm_device *dev = intel_dp->base.base.dev; | 1885 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
1832 | bool channel_eq = false; | 1886 | bool channel_eq = false; |
1833 | int tries, cr_tries; | 1887 | int tries, cr_tries; |
1834 | uint32_t DP = intel_dp->DP; | 1888 | uint32_t DP = intel_dp->DP; |
@@ -1848,7 +1902,10 @@ intel_dp_complete_link_train(struct intel_dp *intel_dp) | |||
1848 | break; | 1902 | break; |
1849 | } | 1903 | } |
1850 | 1904 | ||
1851 | if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { | 1905 | if (IS_HASWELL(dev)) { |
1906 | signal_levels = intel_dp_signal_levels_hsw(intel_dp->train_set[0]); | ||
1907 | DP = (DP & ~DDI_BUF_EMP_MASK) | signal_levels; | ||
1908 | } else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) { | ||
1852 | signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); | 1909 | signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); |
1853 | DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; | 1910 | DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; |
1854 | } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { | 1911 | } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { |
@@ -1865,18 +1922,18 @@ intel_dp_complete_link_train(struct intel_dp *intel_dp) | |||
1865 | DP_LINK_SCRAMBLING_DISABLE)) | 1922 | DP_LINK_SCRAMBLING_DISABLE)) |
1866 | break; | 1923 | break; |
1867 | 1924 | ||
1868 | udelay(400); | 1925 | drm_dp_link_train_channel_eq_delay(intel_dp->dpcd); |
1869 | if (!intel_dp_get_link_status(intel_dp, link_status)) | 1926 | if (!intel_dp_get_link_status(intel_dp, link_status)) |
1870 | break; | 1927 | break; |
1871 | 1928 | ||
1872 | /* Make sure clock is still ok */ | 1929 | /* Make sure clock is still ok */ |
1873 | if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) { | 1930 | if (!drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) { |
1874 | intel_dp_start_link_train(intel_dp); | 1931 | intel_dp_start_link_train(intel_dp); |
1875 | cr_tries++; | 1932 | cr_tries++; |
1876 | continue; | 1933 | continue; |
1877 | } | 1934 | } |
1878 | 1935 | ||
1879 | if (intel_channel_eq_ok(intel_dp, link_status)) { | 1936 | if (drm_dp_channel_eq_ok(link_status, intel_dp->lane_count)) { |
1880 | channel_eq = true; | 1937 | channel_eq = true; |
1881 | break; | 1938 | break; |
1882 | } | 1939 | } |
@@ -1895,16 +1952,38 @@ intel_dp_complete_link_train(struct intel_dp *intel_dp) | |||
1895 | ++tries; | 1952 | ++tries; |
1896 | } | 1953 | } |
1897 | 1954 | ||
1955 | if (channel_eq) | ||
1956 | DRM_DEBUG_KMS("Channel EQ done. DP Training successfull\n"); | ||
1957 | |||
1898 | intel_dp_set_link_train(intel_dp, DP, DP_TRAINING_PATTERN_DISABLE); | 1958 | intel_dp_set_link_train(intel_dp, DP, DP_TRAINING_PATTERN_DISABLE); |
1899 | } | 1959 | } |
1900 | 1960 | ||
1901 | static void | 1961 | static void |
1902 | intel_dp_link_down(struct intel_dp *intel_dp) | 1962 | intel_dp_link_down(struct intel_dp *intel_dp) |
1903 | { | 1963 | { |
1904 | struct drm_device *dev = intel_dp->base.base.dev; | 1964 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
1965 | struct drm_device *dev = intel_dig_port->base.base.dev; | ||
1905 | struct drm_i915_private *dev_priv = dev->dev_private; | 1966 | struct drm_i915_private *dev_priv = dev->dev_private; |
1906 | uint32_t DP = intel_dp->DP; | 1967 | uint32_t DP = intel_dp->DP; |
1907 | 1968 | ||
1969 | /* | ||
1970 | * DDI code has a strict mode set sequence and we should try to respect | ||
1971 | * it, otherwise we might hang the machine in many different ways. So we | ||
1972 | * really should be disabling the port only on a complete crtc_disable | ||
1973 | * sequence. This function is just called under two conditions on DDI | ||
1974 | * code: | ||
1975 | * - Link train failed while doing crtc_enable, and on this case we | ||
1976 | * really should respect the mode set sequence and wait for a | ||
1977 | * crtc_disable. | ||
1978 | * - Someone turned the monitor off and intel_dp_check_link_status | ||
1979 | * called us. We don't need to disable the whole port on this case, so | ||
1980 | * when someone turns the monitor on again, | ||
1981 | * intel_ddi_prepare_link_retrain will take care of redoing the link | ||
1982 | * train. | ||
1983 | */ | ||
1984 | if (IS_HASWELL(dev)) | ||
1985 | return; | ||
1986 | |||
1908 | if (WARN_ON((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)) | 1987 | if (WARN_ON((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)) |
1909 | return; | 1988 | return; |
1910 | 1989 | ||
@@ -1923,7 +2002,7 @@ intel_dp_link_down(struct intel_dp *intel_dp) | |||
1923 | 2002 | ||
1924 | if (HAS_PCH_IBX(dev) && | 2003 | if (HAS_PCH_IBX(dev) && |
1925 | I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) { | 2004 | I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) { |
1926 | struct drm_crtc *crtc = intel_dp->base.base.crtc; | 2005 | struct drm_crtc *crtc = intel_dig_port->base.base.crtc; |
1927 | 2006 | ||
1928 | /* Hardware workaround: leaving our transcoder select | 2007 | /* Hardware workaround: leaving our transcoder select |
1929 | * set to transcoder B while it's off will prevent the | 2008 | * set to transcoder B while it's off will prevent the |
@@ -2024,7 +2103,7 @@ static void | |||
2024 | intel_dp_handle_test_request(struct intel_dp *intel_dp) | 2103 | intel_dp_handle_test_request(struct intel_dp *intel_dp) |
2025 | { | 2104 | { |
2026 | /* NAK by default */ | 2105 | /* NAK by default */ |
2027 | intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK); | 2106 | intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_NAK); |
2028 | } | 2107 | } |
2029 | 2108 | ||
2030 | /* | 2109 | /* |
@@ -2036,16 +2115,17 @@ intel_dp_handle_test_request(struct intel_dp *intel_dp) | |||
2036 | * 4. Check link status on receipt of hot-plug interrupt | 2115 | * 4. Check link status on receipt of hot-plug interrupt |
2037 | */ | 2116 | */ |
2038 | 2117 | ||
2039 | static void | 2118 | void |
2040 | intel_dp_check_link_status(struct intel_dp *intel_dp) | 2119 | intel_dp_check_link_status(struct intel_dp *intel_dp) |
2041 | { | 2120 | { |
2121 | struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base; | ||
2042 | u8 sink_irq_vector; | 2122 | u8 sink_irq_vector; |
2043 | u8 link_status[DP_LINK_STATUS_SIZE]; | 2123 | u8 link_status[DP_LINK_STATUS_SIZE]; |
2044 | 2124 | ||
2045 | if (!intel_dp->base.connectors_active) | 2125 | if (!intel_encoder->connectors_active) |
2046 | return; | 2126 | return; |
2047 | 2127 | ||
2048 | if (WARN_ON(!intel_dp->base.base.crtc)) | 2128 | if (WARN_ON(!intel_encoder->base.crtc)) |
2049 | return; | 2129 | return; |
2050 | 2130 | ||
2051 | /* Try to read receiver status if the link appears to be up */ | 2131 | /* Try to read receiver status if the link appears to be up */ |
@@ -2074,9 +2154,9 @@ intel_dp_check_link_status(struct intel_dp *intel_dp) | |||
2074 | DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n"); | 2154 | DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n"); |
2075 | } | 2155 | } |
2076 | 2156 | ||
2077 | if (!intel_channel_eq_ok(intel_dp, link_status)) { | 2157 | if (!drm_dp_channel_eq_ok(link_status, intel_dp->lane_count)) { |
2078 | DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n", | 2158 | DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n", |
2079 | drm_get_encoder_name(&intel_dp->base.base)); | 2159 | drm_get_encoder_name(&intel_encoder->base)); |
2080 | intel_dp_start_link_train(intel_dp); | 2160 | intel_dp_start_link_train(intel_dp); |
2081 | intel_dp_complete_link_train(intel_dp); | 2161 | intel_dp_complete_link_train(intel_dp); |
2082 | } | 2162 | } |
@@ -2125,11 +2205,12 @@ intel_dp_detect_dpcd(struct intel_dp *intel_dp) | |||
2125 | static enum drm_connector_status | 2205 | static enum drm_connector_status |
2126 | ironlake_dp_detect(struct intel_dp *intel_dp) | 2206 | ironlake_dp_detect(struct intel_dp *intel_dp) |
2127 | { | 2207 | { |
2208 | struct drm_device *dev = intel_dp_to_dev(intel_dp); | ||
2128 | enum drm_connector_status status; | 2209 | enum drm_connector_status status; |
2129 | 2210 | ||
2130 | /* Can't disconnect eDP, but you can close the lid... */ | 2211 | /* Can't disconnect eDP, but you can close the lid... */ |
2131 | if (is_edp(intel_dp)) { | 2212 | if (is_edp(intel_dp)) { |
2132 | status = intel_panel_detect(intel_dp->base.base.dev); | 2213 | status = intel_panel_detect(dev); |
2133 | if (status == connector_status_unknown) | 2214 | if (status == connector_status_unknown) |
2134 | status = connector_status_connected; | 2215 | status = connector_status_connected; |
2135 | return status; | 2216 | return status; |
@@ -2141,7 +2222,7 @@ ironlake_dp_detect(struct intel_dp *intel_dp) | |||
2141 | static enum drm_connector_status | 2222 | static enum drm_connector_status |
2142 | g4x_dp_detect(struct intel_dp *intel_dp) | 2223 | g4x_dp_detect(struct intel_dp *intel_dp) |
2143 | { | 2224 | { |
2144 | struct drm_device *dev = intel_dp->base.base.dev; | 2225 | struct drm_device *dev = intel_dp_to_dev(intel_dp); |
2145 | struct drm_i915_private *dev_priv = dev->dev_private; | 2226 | struct drm_i915_private *dev_priv = dev->dev_private; |
2146 | uint32_t bit; | 2227 | uint32_t bit; |
2147 | 2228 | ||
@@ -2168,44 +2249,45 @@ g4x_dp_detect(struct intel_dp *intel_dp) | |||
2168 | static struct edid * | 2249 | static struct edid * |
2169 | intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter) | 2250 | intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter) |
2170 | { | 2251 | { |
2171 | struct intel_dp *intel_dp = intel_attached_dp(connector); | 2252 | struct intel_connector *intel_connector = to_intel_connector(connector); |
2172 | struct edid *edid; | ||
2173 | int size; | ||
2174 | 2253 | ||
2175 | if (is_edp(intel_dp)) { | 2254 | /* use cached edid if we have one */ |
2176 | if (!intel_dp->edid) | 2255 | if (intel_connector->edid) { |
2256 | struct edid *edid; | ||
2257 | int size; | ||
2258 | |||
2259 | /* invalid edid */ | ||
2260 | if (IS_ERR(intel_connector->edid)) | ||
2177 | return NULL; | 2261 | return NULL; |
2178 | 2262 | ||
2179 | size = (intel_dp->edid->extensions + 1) * EDID_LENGTH; | 2263 | size = (intel_connector->edid->extensions + 1) * EDID_LENGTH; |
2180 | edid = kmalloc(size, GFP_KERNEL); | 2264 | edid = kmalloc(size, GFP_KERNEL); |
2181 | if (!edid) | 2265 | if (!edid) |
2182 | return NULL; | 2266 | return NULL; |
2183 | 2267 | ||
2184 | memcpy(edid, intel_dp->edid, size); | 2268 | memcpy(edid, intel_connector->edid, size); |
2185 | return edid; | 2269 | return edid; |
2186 | } | 2270 | } |
2187 | 2271 | ||
2188 | edid = drm_get_edid(connector, adapter); | 2272 | return drm_get_edid(connector, adapter); |
2189 | return edid; | ||
2190 | } | 2273 | } |
2191 | 2274 | ||
2192 | static int | 2275 | static int |
2193 | intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter) | 2276 | intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter) |
2194 | { | 2277 | { |
2195 | struct intel_dp *intel_dp = intel_attached_dp(connector); | 2278 | struct intel_connector *intel_connector = to_intel_connector(connector); |
2196 | int ret; | ||
2197 | 2279 | ||
2198 | if (is_edp(intel_dp)) { | 2280 | /* use cached edid if we have one */ |
2199 | drm_mode_connector_update_edid_property(connector, | 2281 | if (intel_connector->edid) { |
2200 | intel_dp->edid); | 2282 | /* invalid edid */ |
2201 | ret = drm_add_edid_modes(connector, intel_dp->edid); | 2283 | if (IS_ERR(intel_connector->edid)) |
2202 | drm_edid_to_eld(connector, | 2284 | return 0; |
2203 | intel_dp->edid); | 2285 | |
2204 | return intel_dp->edid_mode_count; | 2286 | return intel_connector_update_modes(connector, |
2287 | intel_connector->edid); | ||
2205 | } | 2288 | } |
2206 | 2289 | ||
2207 | ret = intel_ddc_get_modes(connector, adapter); | 2290 | return intel_ddc_get_modes(connector, adapter); |
2208 | return ret; | ||
2209 | } | 2291 | } |
2210 | 2292 | ||
2211 | 2293 | ||
@@ -2219,9 +2301,12 @@ static enum drm_connector_status | |||
2219 | intel_dp_detect(struct drm_connector *connector, bool force) | 2301 | intel_dp_detect(struct drm_connector *connector, bool force) |
2220 | { | 2302 | { |
2221 | struct intel_dp *intel_dp = intel_attached_dp(connector); | 2303 | struct intel_dp *intel_dp = intel_attached_dp(connector); |
2222 | struct drm_device *dev = intel_dp->base.base.dev; | 2304 | struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp); |
2305 | struct intel_encoder *intel_encoder = &intel_dig_port->base; | ||
2306 | struct drm_device *dev = connector->dev; | ||
2223 | enum drm_connector_status status; | 2307 | enum drm_connector_status status; |
2224 | struct edid *edid = NULL; | 2308 | struct edid *edid = NULL; |
2309 | char dpcd_hex_dump[sizeof(intel_dp->dpcd) * 3]; | ||
2225 | 2310 | ||
2226 | intel_dp->has_audio = false; | 2311 | intel_dp->has_audio = false; |
2227 | 2312 | ||
@@ -2230,10 +2315,9 @@ intel_dp_detect(struct drm_connector *connector, bool force) | |||
2230 | else | 2315 | else |
2231 | status = g4x_dp_detect(intel_dp); | 2316 | status = g4x_dp_detect(intel_dp); |
2232 | 2317 | ||
2233 | DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n", | 2318 | hex_dump_to_buffer(intel_dp->dpcd, sizeof(intel_dp->dpcd), |
2234 | intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2], | 2319 | 32, 1, dpcd_hex_dump, sizeof(dpcd_hex_dump), false); |
2235 | intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5], | 2320 | DRM_DEBUG_KMS("DPCD: %s\n", dpcd_hex_dump); |
2236 | intel_dp->dpcd[6], intel_dp->dpcd[7]); | ||
2237 | 2321 | ||
2238 | if (status != connector_status_connected) | 2322 | if (status != connector_status_connected) |
2239 | return status; | 2323 | return status; |
@@ -2250,49 +2334,31 @@ intel_dp_detect(struct drm_connector *connector, bool force) | |||
2250 | } | 2334 | } |
2251 | } | 2335 | } |
2252 | 2336 | ||
2337 | if (intel_encoder->type != INTEL_OUTPUT_EDP) | ||
2338 | intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT; | ||
2253 | return connector_status_connected; | 2339 | return connector_status_connected; |
2254 | } | 2340 | } |
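
The detect hunk replaces the fixed eight-byte "%02hx" format with hex_dump_to_buffer() over the whole cached dpcd[] array, and sizes the scratch buffer at three characters per byte: two hex digits plus a separator, with the last separator slot holding the terminator. A small sketch of the same sizing and formatting done with plain snprintf() (the DP_RECEIVER_CAP_SIZE value is taken from the driver; the helper name is an illustration, not the kernel function):

#include <stdint.h>
#include <stdio.h>

#define DP_RECEIVER_CAP_SIZE 0xf   /* size of the cached dpcd[] array */

static void dpcd_to_hex(const uint8_t *dpcd, size_t len,
			char *buf, size_t buflen)
{
	size_t i, pos = 0;

	buf[0] = '\0';
	for (i = 0; i < len && pos + 3 <= buflen; i++)
		pos += snprintf(buf + pos, buflen - pos,
				i + 1 < len ? "%02x " : "%02x", dpcd[i]);
}

int main(void)
{
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE] = { 0x11, 0x0a, 0x84, 0x01 };
	char hex[sizeof(dpcd) * 3];        /* same sizing as the driver */

	dpcd_to_hex(dpcd, sizeof(dpcd), hex, sizeof(hex));
	printf("DPCD: %s\n", hex);
	return 0;
}

With 15 bytes the buffer comes to exactly 45 characters: 14 bytes at three characters each, the last byte at two, plus the NUL.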
2255 | 2341 | ||
2256 | static int intel_dp_get_modes(struct drm_connector *connector) | 2342 | static int intel_dp_get_modes(struct drm_connector *connector) |
2257 | { | 2343 | { |
2258 | struct intel_dp *intel_dp = intel_attached_dp(connector); | 2344 | struct intel_dp *intel_dp = intel_attached_dp(connector); |
2259 | struct drm_device *dev = intel_dp->base.base.dev; | 2345 | struct intel_connector *intel_connector = to_intel_connector(connector); |
2260 | struct drm_i915_private *dev_priv = dev->dev_private; | 2346 | struct drm_device *dev = connector->dev; |
2261 | int ret; | 2347 | int ret; |
2262 | 2348 | ||
2263 | /* We should parse the EDID data and find out if it has an audio sink | 2349 | /* We should parse the EDID data and find out if it has an audio sink |
2264 | */ | 2350 | */ |
2265 | 2351 | ||
2266 | ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter); | 2352 | ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter); |
2267 | if (ret) { | 2353 | if (ret) |
2268 | if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) { | ||
2269 | struct drm_display_mode *newmode; | ||
2270 | list_for_each_entry(newmode, &connector->probed_modes, | ||
2271 | head) { | ||
2272 | if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) { | ||
2273 | intel_dp->panel_fixed_mode = | ||
2274 | drm_mode_duplicate(dev, newmode); | ||
2275 | break; | ||
2276 | } | ||
2277 | } | ||
2278 | } | ||
2279 | return ret; | 2354 | return ret; |
2280 | } | ||
2281 | 2355 | ||
2282 | /* if eDP has no EDID, try to use fixed panel mode from VBT */ | 2356 | /* if eDP has no EDID, fall back to fixed mode */ |
2283 | if (is_edp(intel_dp)) { | 2357 | if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) { |
2284 | /* initialize panel mode from VBT if available for eDP */ | 2358 | struct drm_display_mode *mode; |
2285 | if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) { | 2359 | mode = drm_mode_duplicate(dev, |
2286 | intel_dp->panel_fixed_mode = | 2360 | intel_connector->panel.fixed_mode); |
2287 | drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode); | 2361 | if (mode) { |
2288 | if (intel_dp->panel_fixed_mode) { | ||
2289 | intel_dp->panel_fixed_mode->type |= | ||
2290 | DRM_MODE_TYPE_PREFERRED; | ||
2291 | } | ||
2292 | } | ||
2293 | if (intel_dp->panel_fixed_mode) { | ||
2294 | struct drm_display_mode *mode; | ||
2295 | mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode); | ||
2296 | drm_mode_probed_add(connector, mode); | 2362 | drm_mode_probed_add(connector, mode); |
2297 | return 1; | 2363 | return 1; |
2298 | } | 2364 | } |
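
After this hunk intel_dp_get_modes() reports EDID modes whenever the sink provides any, and only otherwise duplicates the panel's fixed mode so an eDP panel without a usable EDID still exposes one mode. A compact sketch of that precedence; the types and the function name are stand-ins for the drm structures, not the driver's API:

#include <stdbool.h>
#include <stdio.h>

struct display_mode {
	int hdisplay, vdisplay, vrefresh;
};

struct panel {
	struct display_mode *fixed_mode;   /* found at init time, or NULL */
};

static int dp_get_modes(int edid_mode_count, bool is_edp,
			const struct panel *panel,
			struct display_mode *out)
{
	if (edid_mode_count)               /* sink provided modes via EDID */
		return edid_mode_count;

	if (is_edp && panel->fixed_mode) { /* eDP without EDID: fixed mode */
		*out = *panel->fixed_mode; /* copy, as drm_mode_duplicate() would */
		return 1;
	}

	return 0;                          /* nothing to report */
}

int main(void)
{
	struct display_mode fixed = { 1366, 768, 60 };
	struct panel panel = { .fixed_mode = &fixed };
	struct display_mode probed;

	int n = dp_get_modes(0, true, &panel, &probed);
	printf("%d mode(s), first %dx%d@%d\n",
	       n, probed.hdisplay, probed.vdisplay, probed.vrefresh);
	return 0;
}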
@@ -2322,7 +2388,9 @@ intel_dp_set_property(struct drm_connector *connector, | |||
2322 | uint64_t val) | 2388 | uint64_t val) |
2323 | { | 2389 | { |
2324 | struct drm_i915_private *dev_priv = connector->dev->dev_private; | 2390 | struct drm_i915_private *dev_priv = connector->dev->dev_private; |
2325 | struct intel_dp *intel_dp = intel_attached_dp(connector); | 2391 | struct intel_connector *intel_connector = to_intel_connector(connector); |
2392 | struct intel_encoder *intel_encoder = intel_attached_encoder(connector); | ||
2393 | struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base); | ||
2326 | int ret; | 2394 | int ret; |
2327 | 2395 | ||
2328 | ret = drm_connector_property_set_value(connector, property, val); | 2396 | ret = drm_connector_property_set_value(connector, property, val); |
@@ -2358,11 +2426,27 @@ intel_dp_set_property(struct drm_connector *connector, | |||
2358 | goto done; | 2426 | goto done; |
2359 | } | 2427 | } |
2360 | 2428 | ||
2429 | if (is_edp(intel_dp) && | ||
2430 | property == connector->dev->mode_config.scaling_mode_property) { | ||
2431 | if (val == DRM_MODE_SCALE_NONE) { | ||
2432 | DRM_DEBUG_KMS("no scaling not supported\n"); | ||
2433 | return -EINVAL; | ||
2434 | } | ||
2435 | |||
2436 | if (intel_connector->panel.fitting_mode == val) { | ||
2437 | /* the eDP scaling property is not changed */ | ||
2438 | return 0; | ||
2439 | } | ||
2440 | intel_connector->panel.fitting_mode = val; | ||
2441 | |||
2442 | goto done; | ||
2443 | } | ||
2444 | |||
2361 | return -EINVAL; | 2445 | return -EINVAL; |
2362 | 2446 | ||
2363 | done: | 2447 | done: |
2364 | if (intel_dp->base.base.crtc) { | 2448 | if (intel_encoder->base.crtc) { |
2365 | struct drm_crtc *crtc = intel_dp->base.base.crtc; | 2449 | struct drm_crtc *crtc = intel_encoder->base.crtc; |
2366 | intel_set_mode(crtc, &crtc->mode, | 2450 | intel_set_mode(crtc, &crtc->mode, |
2367 | crtc->x, crtc->y, crtc->fb); | 2451 | crtc->x, crtc->y, crtc->fb); |
2368 | } | 2452 | } |
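
The new scaling_mode_property branch rejects DRM_MODE_SCALE_NONE for eDP, returns early when the requested value matches the current fitting mode, and only falls through to the modeset at the done: label when the value really changed. A sketch of that decision flow; the enum, struct and return convention are illustrative stand-ins:

#include <errno.h>
#include <stdio.h>

enum scale_mode { SCALE_NONE, SCALE_FULLSCREEN, SCALE_CENTER, SCALE_ASPECT };

struct edp_panel_state {
	enum scale_mode fitting_mode;
};

/* returns <0 on error, 0 if nothing to do, 1 if a modeset is needed */
static int set_scaling_mode(struct edp_panel_state *panel, enum scale_mode val)
{
	if (val == SCALE_NONE)             /* rejected for eDP in the hunk above */
		return -EINVAL;
	if (panel->fitting_mode == val)    /* property unchanged: skip modeset */
		return 0;
	panel->fitting_mode = val;
	return 1;                          /* caller restores crtc->mode */
}

int main(void)
{
	struct edp_panel_state panel = { .fitting_mode = SCALE_ASPECT };

	printf("set aspect again -> %d\n", set_scaling_mode(&panel, SCALE_ASPECT));
	printf("set center       -> %d\n", set_scaling_mode(&panel, SCALE_CENTER));
	printf("set none         -> %d\n", set_scaling_mode(&panel, SCALE_NONE));
	return 0;
}

The early return for an unchanged value matters because the done: path performs a full intel_set_mode() on the attached crtc.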
@@ -2375,27 +2459,33 @@ intel_dp_destroy(struct drm_connector *connector) | |||
2375 | { | 2459 | { |
2376 | struct drm_device *dev = connector->dev; | 2460 | struct drm_device *dev = connector->dev; |
2377 | struct intel_dp *intel_dp = intel_attached_dp(connector); | 2461 | struct intel_dp *intel_dp = intel_attached_dp(connector); |
2462 | struct intel_connector *intel_connector = to_intel_connector(connector); | ||
2378 | 2463 | ||
2379 | if (is_edp(intel_dp)) | 2464 | if (!IS_ERR_OR_NULL(intel_connector->edid)) |
2465 | kfree(intel_connector->edid); | ||
2466 | |||
2467 | if (is_edp(intel_dp)) { | ||
2380 | intel_panel_destroy_backlight(dev); | 2468 | intel_panel_destroy_backlight(dev); |
2469 | intel_panel_fini(&intel_connector->panel); | ||
2470 | } | ||
2381 | 2471 | ||
2382 | drm_sysfs_connector_remove(connector); | 2472 | drm_sysfs_connector_remove(connector); |
2383 | drm_connector_cleanup(connector); | 2473 | drm_connector_cleanup(connector); |
2384 | kfree(connector); | 2474 | kfree(connector); |
2385 | } | 2475 | } |
2386 | 2476 | ||
2387 | static void intel_dp_encoder_destroy(struct drm_encoder *encoder) | 2477 | void intel_dp_encoder_destroy(struct drm_encoder *encoder) |
2388 | { | 2478 | { |
2389 | struct intel_dp *intel_dp = enc_to_intel_dp(encoder); | 2479 | struct intel_digital_port *intel_dig_port = enc_to_dig_port(encoder); |
2480 | struct intel_dp *intel_dp = &intel_dig_port->dp; | ||
2390 | 2481 | ||
2391 | i2c_del_adapter(&intel_dp->adapter); | 2482 | i2c_del_adapter(&intel_dp->adapter); |
2392 | drm_encoder_cleanup(encoder); | 2483 | drm_encoder_cleanup(encoder); |
2393 | if (is_edp(intel_dp)) { | 2484 | if (is_edp(intel_dp)) { |
2394 | kfree(intel_dp->edid); | ||
2395 | cancel_delayed_work_sync(&intel_dp->panel_vdd_work); | 2485 | cancel_delayed_work_sync(&intel_dp->panel_vdd_work); |
2396 | ironlake_panel_vdd_off_sync(intel_dp); | 2486 | ironlake_panel_vdd_off_sync(intel_dp); |
2397 | } | 2487 | } |
2398 | kfree(intel_dp); | 2488 | kfree(intel_dig_port); |
2399 | } | 2489 | } |
2400 | 2490 | ||
2401 | static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = { | 2491 | static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = { |
@@ -2425,7 +2515,7 @@ static const struct drm_encoder_funcs intel_dp_enc_funcs = { | |||
2425 | static void | 2515 | static void |
2426 | intel_dp_hot_plug(struct intel_encoder *intel_encoder) | 2516 | intel_dp_hot_plug(struct intel_encoder *intel_encoder) |
2427 | { | 2517 | { |
2428 | struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base); | 2518 | struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base); |
2429 | 2519 | ||
2430 | intel_dp_check_link_status(intel_dp); | 2520 | intel_dp_check_link_status(intel_dp); |
2431 | } | 2521 | } |
@@ -2435,13 +2525,14 @@ int | |||
2435 | intel_trans_dp_port_sel(struct drm_crtc *crtc) | 2525 | intel_trans_dp_port_sel(struct drm_crtc *crtc) |
2436 | { | 2526 | { |
2437 | struct drm_device *dev = crtc->dev; | 2527 | struct drm_device *dev = crtc->dev; |
2438 | struct intel_encoder *encoder; | 2528 | struct intel_encoder *intel_encoder; |
2529 | struct intel_dp *intel_dp; | ||
2439 | 2530 | ||
2440 | for_each_encoder_on_crtc(dev, crtc, encoder) { | 2531 | for_each_encoder_on_crtc(dev, crtc, intel_encoder) { |
2441 | struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base); | 2532 | intel_dp = enc_to_intel_dp(&intel_encoder->base); |
2442 | 2533 | ||
2443 | if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT || | 2534 | if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT || |
2444 | intel_dp->base.type == INTEL_OUTPUT_EDP) | 2535 | intel_encoder->type == INTEL_OUTPUT_EDP) |
2445 | return intel_dp->output_reg; | 2536 | return intel_dp->output_reg; |
2446 | } | 2537 | } |
2447 | 2538 | ||
@@ -2471,78 +2562,191 @@ bool intel_dpd_is_edp(struct drm_device *dev) | |||
2471 | static void | 2562 | static void |
2472 | intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector) | 2563 | intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector) |
2473 | { | 2564 | { |
2565 | struct intel_connector *intel_connector = to_intel_connector(connector); | ||
2566 | |||
2474 | intel_attach_force_audio_property(connector); | 2567 | intel_attach_force_audio_property(connector); |
2475 | intel_attach_broadcast_rgb_property(connector); | 2568 | intel_attach_broadcast_rgb_property(connector); |
2569 | |||
2570 | if (is_edp(intel_dp)) { | ||
2571 | drm_mode_create_scaling_mode_property(connector->dev); | ||
2572 | drm_connector_attach_property( | ||
2573 | connector, | ||
2574 | connector->dev->mode_config.scaling_mode_property, | ||
2575 | DRM_MODE_SCALE_ASPECT); | ||
2576 | intel_connector->panel.fitting_mode = DRM_MODE_SCALE_ASPECT; | ||
2577 | } | ||
2578 | } | ||

2579 | |||
2580 | static void | ||
2581 | intel_dp_init_panel_power_sequencer(struct drm_device *dev, | ||
2582 | struct intel_dp *intel_dp) | ||
2583 | { | ||
2584 | struct drm_i915_private *dev_priv = dev->dev_private; | ||
2585 | struct edp_power_seq cur, vbt, spec, final; | ||
2586 | u32 pp_on, pp_off, pp_div, pp; | ||
2587 | |||
2588 | /* Workaround: Need to write PP_CONTROL with the unlock key as | ||
2589 | * the very first thing. */ | ||
2590 | pp = ironlake_get_pp_control(dev_priv); | ||
2591 | I915_WRITE(PCH_PP_CONTROL, pp); | ||
2592 | |||
2593 | pp_on = I915_READ(PCH_PP_ON_DELAYS); | ||
2594 | pp_off = I915_READ(PCH_PP_OFF_DELAYS); | ||
2595 | pp_div = I915_READ(PCH_PP_DIVISOR); | ||
2596 | |||
2597 | /* Pull timing values out of registers */ | ||
2598 | cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >> | ||
2599 | PANEL_POWER_UP_DELAY_SHIFT; | ||
2600 | |||
2601 | cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >> | ||
2602 | PANEL_LIGHT_ON_DELAY_SHIFT; | ||
2603 | |||
2604 | cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >> | ||
2605 | PANEL_LIGHT_OFF_DELAY_SHIFT; | ||
2606 | |||
2607 | cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >> | ||
2608 | PANEL_POWER_DOWN_DELAY_SHIFT; | ||
2609 | |||
2610 | cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >> | ||
2611 | PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000; | ||
2612 | |||
2613 | DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n", | ||
2614 | cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12); | ||
2615 | |||
2616 | vbt = dev_priv->edp.pps; | ||
2617 | |||
2618 | /* Upper limits from eDP 1.3 spec. Note that we use the clunky units of | ||
2619 | * our hw here, which are all in 100usec. */ | ||
2620 | spec.t1_t3 = 210 * 10; | ||
2621 | spec.t8 = 50 * 10; /* no limit for t8, use t7 instead */ | ||
2622 | spec.t9 = 50 * 10; /* no limit for t9, make it symmetric with t8 */ | ||
2623 | spec.t10 = 500 * 10; | ||
2624 | /* This one is special and actually in units of 100ms, but zero- | ||
2625 | * based in the hw (so we need to add 100 ms). But the sw vbt | ||
2626 | * table multiplies it by 1000 to make it in units of 100usec, | ||
2627 | * too. */ | ||
2628 | spec.t11_t12 = (510 + 100) * 10; | ||
2629 | |||
2630 | DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n", | ||
2631 | vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12); | ||
2632 | |||
2633 | /* Use the max of the register settings and vbt. If both are | ||
2634 | * unset, fall back to the spec limits. */ | ||
2635 | #define assign_final(field) final.field = (max(cur.field, vbt.field) == 0 ? \ | ||
2636 | spec.field : \ | ||
2637 | max(cur.field, vbt.field)) | ||
2638 | assign_final(t1_t3); | ||
2639 | assign_final(t8); | ||
2640 | assign_final(t9); | ||
2641 | assign_final(t10); | ||
2642 | assign_final(t11_t12); | ||
2643 | #undef assign_final | ||
2644 | |||
2645 | #define get_delay(field) (DIV_ROUND_UP(final.field, 10)) | ||
2646 | intel_dp->panel_power_up_delay = get_delay(t1_t3); | ||
2647 | intel_dp->backlight_on_delay = get_delay(t8); | ||
2648 | intel_dp->backlight_off_delay = get_delay(t9); | ||
2649 | intel_dp->panel_power_down_delay = get_delay(t10); | ||
2650 | intel_dp->panel_power_cycle_delay = get_delay(t11_t12); | ||
2651 | #undef get_delay | ||
2652 | |||
2653 | /* And finally store the new values in the power sequencer. */ | ||
2654 | pp_on = (final.t1_t3 << PANEL_POWER_UP_DELAY_SHIFT) | | ||
2655 | (final.t8 << PANEL_LIGHT_ON_DELAY_SHIFT); | ||
2656 | pp_off = (final.t9 << PANEL_LIGHT_OFF_DELAY_SHIFT) | | ||
2657 | (final.t10 << PANEL_POWER_DOWN_DELAY_SHIFT); | ||
2658 | /* Compute the divisor for the pp clock, simply matching the Bspec | ||
2659 | * formula. */ | ||
2660 | pp_div = ((100 * intel_pch_rawclk(dev))/2 - 1) | ||
2661 | << PP_REFERENCE_DIVIDER_SHIFT; | ||
2662 | pp_div |= (DIV_ROUND_UP(final.t11_t12, 1000) | ||
2663 | << PANEL_POWER_CYCLE_DELAY_SHIFT); | ||
2664 | |||
2665 | /* Haswell doesn't have any port selection bits for the panel | ||
2666 | * power sequencer any more. */ | ||
2667 | if (HAS_PCH_IBX(dev) || HAS_PCH_CPT(dev)) { | ||
2668 | if (is_cpu_edp(intel_dp)) | ||
2669 | pp_on |= PANEL_POWER_PORT_DP_A; | ||
2670 | else | ||
2671 | pp_on |= PANEL_POWER_PORT_DP_D; | ||
2672 | } | ||
2673 | |||
2674 | I915_WRITE(PCH_PP_ON_DELAYS, pp_on); | ||
2675 | I915_WRITE(PCH_PP_OFF_DELAYS, pp_off); | ||
2676 | I915_WRITE(PCH_PP_DIVISOR, pp_div); | ||
2677 | |||
2678 | |||
2679 | DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n", | ||
2680 | intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay, | ||
2681 | intel_dp->panel_power_cycle_delay); | ||
2682 | |||
2683 | DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n", | ||
2684 | intel_dp->backlight_on_delay, intel_dp->backlight_off_delay); | ||
2685 | |||
2686 | DRM_DEBUG_KMS("panel power sequencer register settings: PP_ON %#x, PP_OFF %#x, PP_DIV %#x\n", | ||
2687 | I915_READ(PCH_PP_ON_DELAYS), | ||
2688 | I915_READ(PCH_PP_OFF_DELAYS), | ||
2689 | I915_READ(PCH_PP_DIVISOR)); | ||
2476 | } | 2690 | } |
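
Compared with the delay handling this replaces in intel_dp_init() (which simply took max(registers, VBT) and bailed out when the registers were zero), intel_dp_init_panel_power_sequencer() now also falls back to the eDP 1.3 spec maxima when both sources are unset, then converts from the hardware's 100 usec units to milliseconds with a round-up division. A standalone sketch of that per-field selection and conversion; the spec numbers are copied from the hunk above, everything else (inputs, helper names) is made up for illustration:

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

struct edp_power_seq {
	unsigned int t1_t3, t8, t9, t10, t11_t12;   /* in 100 usec units */
};

static unsigned int pick(unsigned int cur, unsigned int vbt, unsigned int spec)
{
	unsigned int m = cur > vbt ? cur : vbt;
	return m ? m : spec;                /* both unset: use the spec limit */
}

int main(void)
{
	/* upper limits from the eDP 1.3 spec, as listed in the hunk above */
	const struct edp_power_seq spec = {
		.t1_t3   = 210 * 10,
		.t8      = 50 * 10,
		.t9      = 50 * 10,
		.t10     = 500 * 10,
		.t11_t12 = (510 + 100) * 10,
	};
	/* example inputs: registers left at zero, VBT provides a few fields */
	const struct edp_power_seq cur = { 0 };
	const struct edp_power_seq vbt = { .t1_t3 = 400, .t8 = 1, .t10 = 500 };

	struct edp_power_seq final = {
		.t1_t3   = pick(cur.t1_t3,   vbt.t1_t3,   spec.t1_t3),
		.t8      = pick(cur.t8,      vbt.t8,      spec.t8),
		.t9      = pick(cur.t9,      vbt.t9,      spec.t9),
		.t10     = pick(cur.t10,     vbt.t10,     spec.t10),
		.t11_t12 = pick(cur.t11_t12, vbt.t11_t12, spec.t11_t12),
	};

	/* same conversion as the driver's get_delay(): 100us -> ms, round up */
	printf("power up %u ms, backlight on %u ms, off %u ms, "
	       "power down %u ms, cycle %u ms\n",
	       DIV_ROUND_UP(final.t1_t3, 10), DIV_ROUND_UP(final.t8, 10),
	       DIV_ROUND_UP(final.t9, 10), DIV_ROUND_UP(final.t10, 10),
	       DIV_ROUND_UP(final.t11_t12, 10));
	return 0;
}

The function then writes the selected raw values back into PCH_PP_ON_DELAYS, PCH_PP_OFF_DELAYS and PCH_PP_DIVISOR, so the hardware sequencer and the software delays stay in agreement.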
2477 | 2691 | ||
2478 | void | 2692 | void |
2479 | intel_dp_init(struct drm_device *dev, int output_reg, enum port port) | 2693 | intel_dp_init_connector(struct intel_digital_port *intel_dig_port, |
2694 | struct intel_connector *intel_connector) | ||
2480 | { | 2695 | { |
2696 | struct drm_connector *connector = &intel_connector->base; | ||
2697 | struct intel_dp *intel_dp = &intel_dig_port->dp; | ||
2698 | struct intel_encoder *intel_encoder = &intel_dig_port->base; | ||
2699 | struct drm_device *dev = intel_encoder->base.dev; | ||
2481 | struct drm_i915_private *dev_priv = dev->dev_private; | 2700 | struct drm_i915_private *dev_priv = dev->dev_private; |
2482 | struct drm_connector *connector; | 2701 | struct drm_display_mode *fixed_mode = NULL; |
2483 | struct intel_dp *intel_dp; | 2702 | enum port port = intel_dig_port->port; |
2484 | struct intel_encoder *intel_encoder; | ||
2485 | struct intel_connector *intel_connector; | ||
2486 | const char *name = NULL; | 2703 | const char *name = NULL; |
2487 | int type; | 2704 | int type; |
2488 | 2705 | ||
2489 | intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL); | ||
2490 | if (!intel_dp) | ||
2491 | return; | ||
2492 | |||
2493 | intel_dp->output_reg = output_reg; | ||
2494 | intel_dp->port = port; | ||
2495 | /* Preserve the current hw state. */ | 2706 | /* Preserve the current hw state. */ |
2496 | intel_dp->DP = I915_READ(intel_dp->output_reg); | 2707 | intel_dp->DP = I915_READ(intel_dp->output_reg); |
2708 | intel_dp->attached_connector = intel_connector; | ||
2497 | 2709 | ||
2498 | intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); | 2710 | if (HAS_PCH_SPLIT(dev) && port == PORT_D) |
2499 | if (!intel_connector) { | ||
2500 | kfree(intel_dp); | ||
2501 | return; | ||
2502 | } | ||
2503 | intel_encoder = &intel_dp->base; | ||
2504 | |||
2505 | if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D) | ||
2506 | if (intel_dpd_is_edp(dev)) | 2711 | if (intel_dpd_is_edp(dev)) |
2507 | intel_dp->is_pch_edp = true; | 2712 | intel_dp->is_pch_edp = true; |
2508 | 2713 | ||
2509 | if (output_reg == DP_A || is_pch_edp(intel_dp)) { | 2714 | /* |
2715 | * FIXME: We need to initialize built-in panels before external panels. | ||
2716 | * For X0, DP_C is fixed as eDP. Revisit this as part of VLV eDP cleanup | ||
2717 | */ | ||
2718 | if (IS_VALLEYVIEW(dev) && port == PORT_C) { | ||
2719 | type = DRM_MODE_CONNECTOR_eDP; | ||
2720 | intel_encoder->type = INTEL_OUTPUT_EDP; | ||
2721 | } else if (port == PORT_A || is_pch_edp(intel_dp)) { | ||
2510 | type = DRM_MODE_CONNECTOR_eDP; | 2722 | type = DRM_MODE_CONNECTOR_eDP; |
2511 | intel_encoder->type = INTEL_OUTPUT_EDP; | 2723 | intel_encoder->type = INTEL_OUTPUT_EDP; |
2512 | } else { | 2724 | } else { |
2725 | /* The intel_encoder->type value may be INTEL_OUTPUT_UNKNOWN for | ||
2726 | * DDI or INTEL_OUTPUT_DISPLAYPORT for the older gens, so don't | ||
2727 | * rewrite it. | ||
2728 | */ | ||
2513 | type = DRM_MODE_CONNECTOR_DisplayPort; | 2729 | type = DRM_MODE_CONNECTOR_DisplayPort; |
2514 | intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT; | ||
2515 | } | 2730 | } |
2516 | 2731 | ||
2517 | connector = &intel_connector->base; | ||
2518 | drm_connector_init(dev, connector, &intel_dp_connector_funcs, type); | 2732 | drm_connector_init(dev, connector, &intel_dp_connector_funcs, type); |
2519 | drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs); | 2733 | drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs); |
2520 | 2734 | ||
2521 | connector->polled = DRM_CONNECTOR_POLL_HPD; | 2735 | connector->polled = DRM_CONNECTOR_POLL_HPD; |
2522 | |||
2523 | intel_encoder->cloneable = false; | ||
2524 | |||
2525 | INIT_DELAYED_WORK(&intel_dp->panel_vdd_work, | ||
2526 | ironlake_panel_vdd_work); | ||
2527 | |||
2528 | intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2); | ||
2529 | |||
2530 | connector->interlace_allowed = true; | 2736 | connector->interlace_allowed = true; |
2531 | connector->doublescan_allowed = 0; | 2737 | connector->doublescan_allowed = 0; |
2532 | 2738 | ||
2533 | drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs, | 2739 | INIT_DELAYED_WORK(&intel_dp->panel_vdd_work, |
2534 | DRM_MODE_ENCODER_TMDS); | 2740 | ironlake_panel_vdd_work); |
2535 | drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs); | ||
2536 | 2741 | ||
2537 | intel_connector_attach_encoder(intel_connector, intel_encoder); | 2742 | intel_connector_attach_encoder(intel_connector, intel_encoder); |
2538 | drm_sysfs_connector_add(connector); | 2743 | drm_sysfs_connector_add(connector); |
2539 | 2744 | ||
2540 | intel_encoder->enable = intel_enable_dp; | 2745 | if (IS_HASWELL(dev)) |
2541 | intel_encoder->pre_enable = intel_pre_enable_dp; | 2746 | intel_connector->get_hw_state = intel_ddi_connector_get_hw_state; |
2542 | intel_encoder->disable = intel_disable_dp; | 2747 | else |
2543 | intel_encoder->post_disable = intel_post_disable_dp; | 2748 | intel_connector->get_hw_state = intel_connector_get_hw_state; |
2544 | intel_encoder->get_hw_state = intel_dp_get_hw_state; | 2749 | |
2545 | intel_connector->get_hw_state = intel_connector_get_hw_state; | ||
2546 | 2750 | ||
2547 | /* Set up the DDC bus. */ | 2751 | /* Set up the DDC bus. */ |
2548 | switch (port) { | 2752 | switch (port) { |
@@ -2566,66 +2770,15 @@ intel_dp_init(struct drm_device *dev, int output_reg, enum port port) | |||
2566 | break; | 2770 | break; |
2567 | } | 2771 | } |
2568 | 2772 | ||
2569 | /* Cache some DPCD data in the eDP case */ | 2773 | if (is_edp(intel_dp)) |
2570 | if (is_edp(intel_dp)) { | 2774 | intel_dp_init_panel_power_sequencer(dev, intel_dp); |
2571 | struct edp_power_seq cur, vbt; | ||
2572 | u32 pp_on, pp_off, pp_div; | ||
2573 | |||
2574 | pp_on = I915_READ(PCH_PP_ON_DELAYS); | ||
2575 | pp_off = I915_READ(PCH_PP_OFF_DELAYS); | ||
2576 | pp_div = I915_READ(PCH_PP_DIVISOR); | ||
2577 | |||
2578 | if (!pp_on || !pp_off || !pp_div) { | ||
2579 | DRM_INFO("bad panel power sequencing delays, disabling panel\n"); | ||
2580 | intel_dp_encoder_destroy(&intel_dp->base.base); | ||
2581 | intel_dp_destroy(&intel_connector->base); | ||
2582 | return; | ||
2583 | } | ||
2584 | |||
2585 | /* Pull timing values out of registers */ | ||
2586 | cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >> | ||
2587 | PANEL_POWER_UP_DELAY_SHIFT; | ||
2588 | |||
2589 | cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >> | ||
2590 | PANEL_LIGHT_ON_DELAY_SHIFT; | ||
2591 | |||
2592 | cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >> | ||
2593 | PANEL_LIGHT_OFF_DELAY_SHIFT; | ||
2594 | |||
2595 | cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >> | ||
2596 | PANEL_POWER_DOWN_DELAY_SHIFT; | ||
2597 | |||
2598 | cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >> | ||
2599 | PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000; | ||
2600 | |||
2601 | DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n", | ||
2602 | cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12); | ||
2603 | |||
2604 | vbt = dev_priv->edp.pps; | ||
2605 | |||
2606 | DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n", | ||
2607 | vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12); | ||
2608 | |||
2609 | #define get_delay(field) ((max(cur.field, vbt.field) + 9) / 10) | ||
2610 | |||
2611 | intel_dp->panel_power_up_delay = get_delay(t1_t3); | ||
2612 | intel_dp->backlight_on_delay = get_delay(t8); | ||
2613 | intel_dp->backlight_off_delay = get_delay(t9); | ||
2614 | intel_dp->panel_power_down_delay = get_delay(t10); | ||
2615 | intel_dp->panel_power_cycle_delay = get_delay(t11_t12); | ||
2616 | |||
2617 | DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n", | ||
2618 | intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay, | ||
2619 | intel_dp->panel_power_cycle_delay); | ||
2620 | |||
2621 | DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n", | ||
2622 | intel_dp->backlight_on_delay, intel_dp->backlight_off_delay); | ||
2623 | } | ||
2624 | 2775 | ||
2625 | intel_dp_i2c_init(intel_dp, intel_connector, name); | 2776 | intel_dp_i2c_init(intel_dp, intel_connector, name); |
2626 | 2777 | ||
2778 | /* Cache DPCD and EDID for edp. */ | ||
2627 | if (is_edp(intel_dp)) { | 2779 | if (is_edp(intel_dp)) { |
2628 | bool ret; | 2780 | bool ret; |
2781 | struct drm_display_mode *scan; | ||
2629 | struct edid *edid; | 2782 | struct edid *edid; |
2630 | 2783 | ||
2631 | ironlake_edp_panel_vdd_on(intel_dp); | 2784 | ironlake_edp_panel_vdd_on(intel_dp); |
@@ -2640,29 +2793,47 @@ intel_dp_init(struct drm_device *dev, int output_reg, enum port port) | |||
2640 | } else { | 2793 | } else { |
2641 | /* if this fails, presume the device is a ghost */ | 2794 | /* if this fails, presume the device is a ghost */ |
2642 | DRM_INFO("failed to retrieve link info, disabling eDP\n"); | 2795 | DRM_INFO("failed to retrieve link info, disabling eDP\n"); |
2643 | intel_dp_encoder_destroy(&intel_dp->base.base); | 2796 | intel_dp_encoder_destroy(&intel_encoder->base); |
2644 | intel_dp_destroy(&intel_connector->base); | 2797 | intel_dp_destroy(connector); |
2645 | return; | 2798 | return; |
2646 | } | 2799 | } |
2647 | 2800 | ||
2648 | ironlake_edp_panel_vdd_on(intel_dp); | 2801 | ironlake_edp_panel_vdd_on(intel_dp); |
2649 | edid = drm_get_edid(connector, &intel_dp->adapter); | 2802 | edid = drm_get_edid(connector, &intel_dp->adapter); |
2650 | if (edid) { | 2803 | if (edid) { |
2651 | drm_mode_connector_update_edid_property(connector, | 2804 | if (drm_add_edid_modes(connector, edid)) { |
2652 | edid); | 2805 | drm_mode_connector_update_edid_property(connector, edid); |
2653 | intel_dp->edid_mode_count = | 2806 | drm_edid_to_eld(connector, edid); |
2654 | drm_add_edid_modes(connector, edid); | 2807 | } else { |
2655 | drm_edid_to_eld(connector, edid); | 2808 | kfree(edid); |
2656 | intel_dp->edid = edid; | 2809 | edid = ERR_PTR(-EINVAL); |
2810 | } | ||
2811 | } else { | ||
2812 | edid = ERR_PTR(-ENOENT); | ||
2813 | } | ||
2814 | intel_connector->edid = edid; | ||
2815 | |||
2816 | /* prefer fixed mode from EDID if available */ | ||
2817 | list_for_each_entry(scan, &connector->probed_modes, head) { | ||
2818 | if ((scan->type & DRM_MODE_TYPE_PREFERRED)) { | ||
2819 | fixed_mode = drm_mode_duplicate(dev, scan); | ||
2820 | break; | ||
2821 | } | ||
2657 | } | 2822 | } |
2823 | |||
2824 | /* fall back to the VBT mode if available for eDP */ | ||
2825 | if (!fixed_mode && dev_priv->lfp_lvds_vbt_mode) { | ||
2826 | fixed_mode = drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode); | ||
2827 | if (fixed_mode) | ||
2828 | fixed_mode->type |= DRM_MODE_TYPE_PREFERRED; | ||
2829 | } | ||
2830 | |||
2658 | ironlake_edp_panel_vdd_off(intel_dp, false); | 2831 | ironlake_edp_panel_vdd_off(intel_dp, false); |
2659 | } | 2832 | } |
2660 | 2833 | ||
2661 | intel_encoder->hot_plug = intel_dp_hot_plug; | ||
2662 | |||
2663 | if (is_edp(intel_dp)) { | 2834 | if (is_edp(intel_dp)) { |
2664 | dev_priv->int_edp_connector = connector; | 2835 | intel_panel_init(&intel_connector->panel, fixed_mode); |
2665 | intel_panel_setup_backlight(dev); | 2836 | intel_panel_setup_backlight(connector); |
2666 | } | 2837 | } |
2667 | 2838 | ||
2668 | intel_dp_add_properties(intel_dp, connector); | 2839 | intel_dp_add_properties(intel_dp, connector); |
@@ -2676,3 +2847,45 @@ intel_dp_init(struct drm_device *dev, int output_reg, enum port port) | |||
2676 | I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd); | 2847 | I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd); |
2677 | } | 2848 | } |
2678 | } | 2849 | } |
2850 | |||
2851 | void | ||
2852 | intel_dp_init(struct drm_device *dev, int output_reg, enum port port) | ||
2853 | { | ||
2854 | struct intel_digital_port *intel_dig_port; | ||
2855 | struct intel_encoder *intel_encoder; | ||
2856 | struct drm_encoder *encoder; | ||
2857 | struct intel_connector *intel_connector; | ||
2858 | |||
2859 | intel_dig_port = kzalloc(sizeof(struct intel_digital_port), GFP_KERNEL); | ||
2860 | if (!intel_dig_port) | ||
2861 | return; | ||
2862 | |||
2863 | intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); | ||
2864 | if (!intel_connector) { | ||
2865 | kfree(intel_dig_port); | ||
2866 | return; | ||
2867 | } | ||
2868 | |||
2869 | intel_encoder = &intel_dig_port->base; | ||
2870 | encoder = &intel_encoder->base; | ||
2871 | |||
2872 | drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs, | ||
2873 | DRM_MODE_ENCODER_TMDS); | ||
2874 | drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs); | ||
2875 | |||
2876 | intel_encoder->enable = intel_enable_dp; | ||
2877 | intel_encoder->pre_enable = intel_pre_enable_dp; | ||
2878 | intel_encoder->disable = intel_disable_dp; | ||
2879 | intel_encoder->post_disable = intel_post_disable_dp; | ||
2880 | intel_encoder->get_hw_state = intel_dp_get_hw_state; | ||
2881 | |||
2882 | intel_dig_port->port = port; | ||
2883 | intel_dig_port->dp.output_reg = output_reg; | ||
2884 | |||
2885 | intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT; | ||
2886 | intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2); | ||
2887 | intel_encoder->cloneable = false; | ||
2888 | intel_encoder->hot_plug = intel_dp_hot_plug; | ||
2889 | |||
2890 | intel_dp_init_connector(intel_dig_port, intel_connector); | ||
2891 | } | ||
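
The split into intel_dp_init() and intel_dp_init_connector() goes with the structural change running through the whole diff: the DP state is embedded in a digital-port wrapper next to the common encoder, and helpers such as dp_to_dig_port() and enc_to_dig_port() recover the wrapper from either member. A minimal sketch of that embedding pattern with container_of(); the struct and field names are illustrative, not the driver's:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct encoder { int type; };
struct dp_state { unsigned int output_reg; };

struct digital_port {
	struct encoder base;   /* shared encoder state */
	struct dp_state dp;    /* DP/eDP specific state */
	int port;
};

static struct digital_port *dp_to_dig_port(struct dp_state *dp)
{
	return container_of(dp, struct digital_port, dp);
}

static struct digital_port *enc_to_dig_port(struct encoder *enc)
{
	return container_of(enc, struct digital_port, base);
}

int main(void)
{
	struct digital_port port = { .dp = { .output_reg = 0x64200 }, .port = 3 };

	/* either member leads back to the same wrapper */
	printf("%d %d\n", dp_to_dig_port(&port.dp)->port,
	       enc_to_dig_port(&port.base)->port);
	return 0;
}

Since the whole wrapper is one allocation, intel_dp_encoder_destroy() frees intel_dig_port rather than the old standalone intel_dp, which is exactly what the kfree() change earlier in this diff reflects.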