author		Dave Airlie <airlied@redhat.com>	2013-09-30 20:00:50 -0400
committer	Dave Airlie <airlied@redhat.com>	2013-09-30 20:00:50 -0400
commit		4821ff14a32886ee95c8b2850746ec020ca65189 (patch)
tree		dad7b2c822e55669b687ae754cf1b958316ef538 /drivers/gpu/drm/i915/intel_dp.c
parent		15c03dd4859ab16f9212238f29dd315654aa94f6 (diff)
parent		b599c89e8c5cf0c37352e0871be240291f8ce922 (diff)
Merge tag 'drm-intel-next-2013-09-21-merged' of git://people.freedesktop.org/~danvet/drm-intel into drm-next
drm-intel-next-2013-09-21:
- clock state handling rework from Ville
- l3 parity handling fixes for hsw from Ben
- some more watermark improvements from Ville
- ban badly behaved context from Mika
- a few vlv improvements from Jesse
- VGA power domain handling from Ville

drm-intel-next-2013-09-06:
- Basic mipi dsi support from Jani. Not yet converted over to drm_bridge
  since that was too fresh, but the porting is in progress already.
- More vma patches from Ben, this time the code to convert the execbuffer
  code. Now that the shrinker recursion bug is tracked down we can move
  ahead here again. Yay!
- Optimize hw context switching to not generate needless interrupts
  (Chris Wilson). Also some shuffling for the outstanding request
  allocation.
- Opregion support for SWSCI, although not yet fully wired up (we need a
  bit of runtime D3 support for that apparently, due to Windows design
  deficiencies), from Jani Nikula.
- A few smaller changes all over.

[airlied: merge conflict fix in i9xx_set_pipeconf]

* tag 'drm-intel-next-2013-09-21-merged' of git://people.freedesktop.org/~danvet/drm-intel: (119 commits)
  drm/i915: assume all GM45 Acer laptops use inverted backlight PWM
  drm/i915: cleanup a min_t() cast
  drm/i915: Pull intel_init_power_well() out of intel_modeset_init_hw()
  drm/i915: Add POWER_DOMAIN_VGA
  drm/i915: Refactor power well refcount inc/dec operations
  drm/i915: Add intel_display_power_{get, put} to request power for specific domains
  drm/i915: Change i915_request power well handling
  drm/i915: POSTING_READ IPS_CTL before waiting for the vblank
  drm/i915: don't disable ERR_INT on the IRQ handler
  drm/i915/vlv: disable rc6p and rc6pp residency reporting on BYT
  drm/i915/vlv: honor i915_enable_rc6 boot param on VLV
  drm/i915: s/HAS_L3_GPU_CACHE/HAS_L3_DPF
  drm/i915: Do remaps for all contexts
  drm/i915: Keep a list of all contexts
  drm/i915: Make l3 remapping use the ring
  drm/i915: Add second slice l3 remapping
  drm/i915: Fix HSW parity test
  drm/i915: dump crtc timings from the pipe config
  drm/i915: register backlight device also when backlight class is a module
  drm/i915: write D_COMP using the mailbox
  ...

Conflicts:
	drivers/gpu/drm/i915/intel_display.c
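The "clock state handling rework" mentioned above shows up in this file as the new dp_link_dpll tables and the table-driven intel_dp_set_clock() in the diff below. The following standalone sketch mirrors that lookup pattern outside the kernel; the struct shape and the gen4 divisor values are copied from the diff, while the DP_LINK_BW_* values follow the DPCD encoding and the dp_set_clock() helper plus the main() harness are illustrative stand-ins, not kernel API.

/* Simplified, standalone illustration of the table-driven DPLL selection
 * this series introduces; not kernel code. */
#include <stdio.h>
#include <stddef.h>
#include <stdbool.h>

#define DP_LINK_BW_1_62	0x06	/* DPCD link-bw code for 1.62 GHz */
#define DP_LINK_BW_2_7	0x0a	/* DPCD link-bw code for 2.7 GHz */
#define ARRAY_SIZE(a)	(sizeof(a) / sizeof((a)[0]))

struct dpll { int p1, p2, n, m1, m2; };	/* simplified stand-in for the i915 struct dpll */

struct dp_link_dpll {
	int link_bw;
	struct dpll dpll;
};

/* Divisor table; values copied from gen4_dpll[] in the diff below. */
static const struct dp_link_dpll gen4_dpll[] = {
	{ DP_LINK_BW_1_62, { .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ DP_LINK_BW_2_7,  { .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } },
};

/* Mirrors the rewritten intel_dp_set_clock(): scan the platform table for
 * a matching link bandwidth instead of open-coding every divisor field. */
static bool dp_set_clock(const struct dp_link_dpll *divisor, size_t count,
			 int link_bw, struct dpll *out)
{
	for (size_t i = 0; i < count; i++) {
		if (divisor[i].link_bw == link_bw) {
			*out = divisor[i].dpll;
			return true;
		}
	}
	return false;
}

int main(void)
{
	struct dpll dpll;

	if (dp_set_clock(gen4_dpll, ARRAY_SIZE(gen4_dpll), DP_LINK_BW_2_7, &dpll))
		printf("2.7 GHz link: n=%d m1=%d m2=%d p1=%d p2=%d\n",
		       dpll.n, dpll.m1, dpll.m2, dpll.p1, dpll.p2);
	return 0;
}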
Diffstat (limited to 'drivers/gpu/drm/i915/intel_dp.c')
-rw-r--r--	drivers/gpu/drm/i915/intel_dp.c	313
1 file changed, 211 insertions(+), 102 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_dp.c b/drivers/gpu/drm/i915/intel_dp.c
index 79c14e298ba6..60d2006fe15d 100644
--- a/drivers/gpu/drm/i915/intel_dp.c
+++ b/drivers/gpu/drm/i915/intel_dp.c
@@ -38,6 +38,32 @@
 
 #define DP_LINK_CHECK_TIMEOUT	(10 * 1000)
 
+struct dp_link_dpll {
+	int link_bw;
+	struct dpll dpll;
+};
+
+static const struct dp_link_dpll gen4_dpll[] = {
+	{ DP_LINK_BW_1_62,
+		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
+	{ DP_LINK_BW_2_7,
+		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
+};
+
+static const struct dp_link_dpll pch_dpll[] = {
+	{ DP_LINK_BW_1_62,
+		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
+	{ DP_LINK_BW_2_7,
+		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
+};
+
+static const struct dp_link_dpll vlv_dpll[] = {
+	{ DP_LINK_BW_1_62,
+		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
+	{ DP_LINK_BW_2_7,
+		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
+};
+
 /**
  * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
  * @intel_dp: DP struct
@@ -211,24 +237,77 @@ intel_hrawclk(struct drm_device *dev)
 	}
 }
 
+static void
+intel_dp_init_panel_power_sequencer(struct drm_device *dev,
+				    struct intel_dp *intel_dp,
+				    struct edp_power_seq *out);
+static void
+intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
+					      struct intel_dp *intel_dp,
+					      struct edp_power_seq *out);
+
+static enum pipe
+vlv_power_sequencer_pipe(struct intel_dp *intel_dp)
+{
+	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
+	struct drm_crtc *crtc = intel_dig_port->base.base.crtc;
+	struct drm_device *dev = intel_dig_port->base.base.dev;
+	struct drm_i915_private *dev_priv = dev->dev_private;
+	enum port port = intel_dig_port->port;
+	enum pipe pipe;
+
+	/* modeset should have pipe */
+	if (crtc)
+		return to_intel_crtc(crtc)->pipe;
+
+	/* init time, try to find a pipe with this port selected */
+	for (pipe = PIPE_A; pipe <= PIPE_B; pipe++) {
+		u32 port_sel = I915_READ(VLV_PIPE_PP_ON_DELAYS(pipe)) &
+			PANEL_PORT_SELECT_MASK;
+		if (port_sel == PANEL_PORT_SELECT_DPB_VLV && port == PORT_B)
+			return pipe;
+		if (port_sel == PANEL_PORT_SELECT_DPC_VLV && port == PORT_C)
+			return pipe;
+	}
+
+	/* shrug */
+	return PIPE_A;
+}
+
+static u32 _pp_ctrl_reg(struct intel_dp *intel_dp)
+{
+	struct drm_device *dev = intel_dp_to_dev(intel_dp);
+
+	if (HAS_PCH_SPLIT(dev))
+		return PCH_PP_CONTROL;
+	else
+		return VLV_PIPE_PP_CONTROL(vlv_power_sequencer_pipe(intel_dp));
+}
+
+static u32 _pp_stat_reg(struct intel_dp *intel_dp)
+{
+	struct drm_device *dev = intel_dp_to_dev(intel_dp);
+
+	if (HAS_PCH_SPLIT(dev))
+		return PCH_PP_STATUS;
+	else
+		return VLV_PIPE_PP_STATUS(vlv_power_sequencer_pipe(intel_dp));
+}
+
 static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
 {
 	struct drm_device *dev = intel_dp_to_dev(intel_dp);
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	u32 pp_stat_reg;
 
-	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
-	return (I915_READ(pp_stat_reg) & PP_ON) != 0;
+	return (I915_READ(_pp_stat_reg(intel_dp)) & PP_ON) != 0;
 }
 
 static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
 {
 	struct drm_device *dev = intel_dp_to_dev(intel_dp);
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	u32 pp_ctrl_reg;
 
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
-	return (I915_READ(pp_ctrl_reg) & EDP_FORCE_VDD) != 0;
+	return (I915_READ(_pp_ctrl_reg(intel_dp)) & EDP_FORCE_VDD) != 0;
 }
 
 static void
@@ -236,19 +315,15 @@ intel_dp_check_edp(struct intel_dp *intel_dp)
 {
 	struct drm_device *dev = intel_dp_to_dev(intel_dp);
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	u32 pp_stat_reg, pp_ctrl_reg;
 
 	if (!is_edp(intel_dp))
 		return;
 
-	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
-
 	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
 		WARN(1, "eDP powered off while attempting aux channel communication.\n");
 		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
-			      I915_READ(pp_stat_reg),
-			      I915_READ(pp_ctrl_reg));
+			      I915_READ(_pp_stat_reg(intel_dp)),
+			      I915_READ(_pp_ctrl_reg(intel_dp)));
 	}
 }
 
@@ -361,6 +436,12 @@ intel_dp_aux_ch(struct intel_dp *intel_dp,
 		goto out;
 	}
 
+	/* Only 5 data registers! */
+	if (WARN_ON(send_bytes > 20 || recv_size > 20)) {
+		ret = -E2BIG;
+		goto out;
+	}
+
 	while ((aux_clock_divider = get_aux_clock_divider(intel_dp, clock++))) {
 		/* Must try at least 3 times according to DP spec */
 		for (try = 0; try < 5; try++) {
@@ -451,9 +532,10 @@ intel_dp_aux_native_write(struct intel_dp *intel_dp,
 	int msg_bytes;
 	uint8_t ack;
 
+	if (WARN_ON(send_bytes > 16))
+		return -E2BIG;
+
 	intel_dp_check_edp(intel_dp);
-	if (send_bytes > 16)
-		return -1;
 	msg[0] = AUX_NATIVE_WRITE << 4;
 	msg[1] = address >> 8;
 	msg[2] = address & 0xff;
@@ -494,6 +576,9 @@ intel_dp_aux_native_read(struct intel_dp *intel_dp,
 	uint8_t ack;
 	int ret;
 
+	if (WARN_ON(recv_bytes > 19))
+		return -E2BIG;
+
 	intel_dp_check_edp(intel_dp);
 	msg[0] = AUX_NATIVE_READ << 4;
 	msg[1] = address >> 8;
@@ -660,41 +745,30 @@ intel_dp_set_clock(struct intel_encoder *encoder,
 		   struct intel_crtc_config *pipe_config, int link_bw)
 {
 	struct drm_device *dev = encoder->base.dev;
+	const struct dp_link_dpll *divisor = NULL;
+	int i, count = 0;
 
 	if (IS_G4X(dev)) {
-		if (link_bw == DP_LINK_BW_1_62) {
-			pipe_config->dpll.p1 = 2;
-			pipe_config->dpll.p2 = 10;
-			pipe_config->dpll.n = 2;
-			pipe_config->dpll.m1 = 23;
-			pipe_config->dpll.m2 = 8;
-		} else {
-			pipe_config->dpll.p1 = 1;
-			pipe_config->dpll.p2 = 10;
-			pipe_config->dpll.n = 1;
-			pipe_config->dpll.m1 = 14;
-			pipe_config->dpll.m2 = 2;
-		}
-		pipe_config->clock_set = true;
+		divisor = gen4_dpll;
+		count = ARRAY_SIZE(gen4_dpll);
 	} else if (IS_HASWELL(dev)) {
 		/* Haswell has special-purpose DP DDI clocks. */
 	} else if (HAS_PCH_SPLIT(dev)) {
-		if (link_bw == DP_LINK_BW_1_62) {
-			pipe_config->dpll.n = 1;
-			pipe_config->dpll.p1 = 2;
-			pipe_config->dpll.p2 = 10;
-			pipe_config->dpll.m1 = 12;
-			pipe_config->dpll.m2 = 9;
-		} else {
-			pipe_config->dpll.n = 2;
-			pipe_config->dpll.p1 = 1;
-			pipe_config->dpll.p2 = 10;
-			pipe_config->dpll.m1 = 14;
-			pipe_config->dpll.m2 = 8;
-		}
-		pipe_config->clock_set = true;
+		divisor = pch_dpll;
+		count = ARRAY_SIZE(pch_dpll);
 	} else if (IS_VALLEYVIEW(dev)) {
-		/* FIXME: Need to figure out optimized DP clocks for vlv. */
+		divisor = vlv_dpll;
+		count = ARRAY_SIZE(vlv_dpll);
+	}
+
+	if (divisor && count) {
+		for (i = 0; i < count; i++) {
+			if (link_bw == divisor[i].link_bw) {
+				pipe_config->dpll = divisor[i].dpll;
+				pipe_config->clock_set = true;
+				break;
+			}
+		}
 	}
 }
 
@@ -944,8 +1018,8 @@ static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	u32 pp_stat_reg, pp_ctrl_reg;
 
-	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
+	pp_stat_reg = _pp_stat_reg(intel_dp);
+	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
 
 	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
 			mask, value,
@@ -987,11 +1061,8 @@ static u32 ironlake_get_pp_control(struct intel_dp *intel_dp)
 	struct drm_device *dev = intel_dp_to_dev(intel_dp);
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	u32 control;
-	u32 pp_ctrl_reg;
-
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
-	control = I915_READ(pp_ctrl_reg);
 
+	control = I915_READ(_pp_ctrl_reg(intel_dp));
 	control &= ~PANEL_UNLOCK_MASK;
 	control |= PANEL_UNLOCK_REGS;
 	return control;
@@ -1024,8 +1095,8 @@ void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
 	pp = ironlake_get_pp_control(intel_dp);
 	pp |= EDP_FORCE_VDD;
 
-	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
+	pp_stat_reg = _pp_stat_reg(intel_dp);
+	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
 
 	I915_WRITE(pp_ctrl_reg, pp);
 	POSTING_READ(pp_ctrl_reg);
@@ -1053,8 +1124,8 @@ static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
 	pp = ironlake_get_pp_control(intel_dp);
 	pp &= ~EDP_FORCE_VDD;
 
-	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
+	pp_stat_reg = _pp_ctrl_reg(intel_dp);
+	pp_ctrl_reg = _pp_stat_reg(intel_dp);
 
 	I915_WRITE(pp_ctrl_reg, pp);
 	POSTING_READ(pp_ctrl_reg);
@@ -1119,20 +1190,19 @@ void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 
 	ironlake_wait_panel_power_cycle(intel_dp);
 
+	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
 	pp = ironlake_get_pp_control(intel_dp);
 	if (IS_GEN5(dev)) {
 		/* ILK workaround: disable reset around power sequence */
 		pp &= ~PANEL_POWER_RESET;
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
+		I915_WRITE(pp_ctrl_reg, pp);
+		POSTING_READ(pp_ctrl_reg);
 	}
 
 	pp |= POWER_TARGET_ON;
 	if (!IS_GEN5(dev))
 		pp |= PANEL_POWER_RESET;
 
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
-
 	I915_WRITE(pp_ctrl_reg, pp);
 	POSTING_READ(pp_ctrl_reg);
 
@@ -1140,8 +1210,8 @@ void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 
 	if (IS_GEN5(dev)) {
 		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
+		I915_WRITE(pp_ctrl_reg, pp);
+		POSTING_READ(pp_ctrl_reg);
 	}
 }
 
@@ -1164,7 +1234,7 @@ void ironlake_edp_panel_off(struct intel_dp *intel_dp)
 	 * panels get very unhappy and cease to work. */
 	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
 
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
+	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
 
 	I915_WRITE(pp_ctrl_reg, pp);
 	POSTING_READ(pp_ctrl_reg);
@@ -1197,7 +1267,7 @@ void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
 	pp = ironlake_get_pp_control(intel_dp);
 	pp |= EDP_BLC_ENABLE;
 
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
+	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
 
 	I915_WRITE(pp_ctrl_reg, pp);
 	POSTING_READ(pp_ctrl_reg);
@@ -1221,7 +1291,7 @@ void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
 	pp = ironlake_get_pp_control(intel_dp);
 	pp &= ~EDP_BLC_ENABLE;
 
-	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
+	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
 
 	I915_WRITE(pp_ctrl_reg, pp);
 	POSTING_READ(pp_ctrl_reg);
@@ -1368,6 +1438,7 @@ static void intel_dp_get_config(struct intel_encoder *encoder,
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	enum port port = dp_to_dig_port(intel_dp)->port;
 	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
+	int dotclock;
 
 	if ((port == PORT_A) || !HAS_PCH_CPT(dev)) {
 		tmp = I915_READ(intel_dp->output_reg);
@@ -1395,12 +1466,24 @@ static void intel_dp_get_config(struct intel_encoder *encoder,
 
 	pipe_config->adjusted_mode.flags |= flags;
 
-	if (dp_to_dig_port(intel_dp)->port == PORT_A) {
+	pipe_config->has_dp_encoder = true;
+
+	intel_dp_get_m_n(crtc, pipe_config);
+
+	if (port == PORT_A) {
 		if ((I915_READ(DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_160MHZ)
 			pipe_config->port_clock = 162000;
 		else
 			pipe_config->port_clock = 270000;
 	}
+
+	dotclock = intel_dotclock_calculate(pipe_config->port_clock,
+					    &pipe_config->dp_m_n);
+
+	if (HAS_PCH_SPLIT(dev_priv->dev) && port != PORT_A)
+		ironlake_check_encoder_dotclock(pipe_config, dotclock);
+
+	pipe_config->adjusted_mode.clock = dotclock;
 }
 
 static bool is_edp_psr(struct intel_dp *intel_dp)
@@ -1566,7 +1649,7 @@ static bool intel_edp_psr_match_conditions(struct intel_dp *intel_dp)
 	}
 
 	intel_crtc = to_intel_crtc(crtc);
-	if (!intel_crtc->active || !crtc->fb || !crtc->mode.clock) {
+	if (!intel_crtc_active(crtc)) {
 		DRM_DEBUG_KMS("crtc not active for PSR\n");
 		dev_priv->no_psr_reason = PSR_CRTC_NOT_ACTIVE;
 		return false;
@@ -1593,7 +1676,7 @@ static bool intel_edp_psr_match_conditions(struct intel_dp *intel_dp)
 		return false;
 	}
 
-	if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE) {
+	if (intel_crtc->config.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE) {
 		DRM_DEBUG_KMS("PSR condition failed: Interlaced is Enabled\n");
 		dev_priv->no_psr_reason = PSR_INTERLACED_ENABLED;
 		return false;
@@ -1713,14 +1796,24 @@ static void intel_enable_dp(struct intel_encoder *encoder)
 	ironlake_edp_panel_vdd_off(intel_dp, true);
 	intel_dp_complete_link_train(intel_dp);
 	intel_dp_stop_link_train(intel_dp);
+}
+
+static void g4x_enable_dp(struct intel_encoder *encoder)
+{
+	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
+
+	intel_enable_dp(encoder);
 	ironlake_edp_backlight_on(intel_dp);
 }
 
 static void vlv_enable_dp(struct intel_encoder *encoder)
 {
+	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
+
+	ironlake_edp_backlight_on(intel_dp);
 }
 
-static void intel_pre_enable_dp(struct intel_encoder *encoder)
+static void g4x_pre_enable_dp(struct intel_encoder *encoder)
 {
 	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
 	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
@@ -1738,53 +1831,59 @@ static void vlv_pre_enable_dp(struct intel_encoder *encoder)
 	struct intel_crtc *intel_crtc = to_intel_crtc(encoder->base.crtc);
 	int port = vlv_dport_to_channel(dport);
 	int pipe = intel_crtc->pipe;
+	struct edp_power_seq power_seq;
 	u32 val;
 
 	mutex_lock(&dev_priv->dpio_lock);
 
-	val = vlv_dpio_read(dev_priv, DPIO_DATA_LANE_A(port));
+	val = vlv_dpio_read(dev_priv, pipe, DPIO_DATA_LANE_A(port));
 	val = 0;
 	if (pipe)
 		val |= (1<<21);
 	else
 		val &= ~(1<<21);
 	val |= 0x001000c4;
-	vlv_dpio_write(dev_priv, DPIO_DATA_CHANNEL(port), val);
-	vlv_dpio_write(dev_priv, DPIO_PCS_CLOCKBUF0(port), 0x00760018);
-	vlv_dpio_write(dev_priv, DPIO_PCS_CLOCKBUF8(port), 0x00400888);
+	vlv_dpio_write(dev_priv, pipe, DPIO_DATA_CHANNEL(port), val);
+	vlv_dpio_write(dev_priv, pipe, DPIO_PCS_CLOCKBUF0(port), 0x00760018);
+	vlv_dpio_write(dev_priv, pipe, DPIO_PCS_CLOCKBUF8(port), 0x00400888);
 
 	mutex_unlock(&dev_priv->dpio_lock);
 
+	/* init power sequencer on this pipe and port */
+	intel_dp_init_panel_power_sequencer(dev, intel_dp, &power_seq);
+	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp,
+						      &power_seq);
+
 	intel_enable_dp(encoder);
 
 	vlv_wait_port_ready(dev_priv, port);
 }
 
-static void intel_dp_pre_pll_enable(struct intel_encoder *encoder)
+static void vlv_dp_pre_pll_enable(struct intel_encoder *encoder)
 {
 	struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
 	struct drm_device *dev = encoder->base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
+	struct intel_crtc *intel_crtc =
+		to_intel_crtc(encoder->base.crtc);
 	int port = vlv_dport_to_channel(dport);
-
-	if (!IS_VALLEYVIEW(dev))
-		return;
+	int pipe = intel_crtc->pipe;
 
 	/* Program Tx lane resets to default */
 	mutex_lock(&dev_priv->dpio_lock);
-	vlv_dpio_write(dev_priv, DPIO_PCS_TX(port),
+	vlv_dpio_write(dev_priv, pipe, DPIO_PCS_TX(port),
 			 DPIO_PCS_TX_LANE2_RESET |
 			 DPIO_PCS_TX_LANE1_RESET);
-	vlv_dpio_write(dev_priv, DPIO_PCS_CLK(port),
+	vlv_dpio_write(dev_priv, pipe, DPIO_PCS_CLK(port),
 			 DPIO_PCS_CLK_CRI_RXEB_EIOS_EN |
 			 DPIO_PCS_CLK_CRI_RXDIGFILTSG_EN |
 			 (1<<DPIO_PCS_CLK_DATAWIDTH_SHIFT) |
 			 DPIO_PCS_CLK_SOFT_RESET);
 
 	/* Fix up inter-pair skew failure */
-	vlv_dpio_write(dev_priv, DPIO_PCS_STAGGER1(port), 0x00750f00);
-	vlv_dpio_write(dev_priv, DPIO_TX_CTL(port), 0x00001500);
-	vlv_dpio_write(dev_priv, DPIO_TX_LANE(port), 0x40400000);
+	vlv_dpio_write(dev_priv, pipe, DPIO_PCS_STAGGER1(port), 0x00750f00);
+	vlv_dpio_write(dev_priv, pipe, DPIO_TX_CTL(port), 0x00001500);
+	vlv_dpio_write(dev_priv, pipe, DPIO_TX_LANE(port), 0x40400000);
 	mutex_unlock(&dev_priv->dpio_lock);
 }
 
@@ -1919,10 +2018,13 @@ static uint32_t intel_vlv_signal_levels(struct intel_dp *intel_dp)
 	struct drm_device *dev = intel_dp_to_dev(intel_dp);
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
+	struct intel_crtc *intel_crtc =
+		to_intel_crtc(dport->base.base.crtc);
 	unsigned long demph_reg_value, preemph_reg_value,
 		uniqtranscale_reg_value;
 	uint8_t train_set = intel_dp->train_set[0];
 	int port = vlv_dport_to_channel(dport);
+	int pipe = intel_crtc->pipe;
 
 	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
 	case DP_TRAIN_PRE_EMPHASIS_0:
@@ -1998,14 +2100,14 @@ static uint32_t intel_vlv_signal_levels(struct intel_dp *intel_dp)
 	}
 
 	mutex_lock(&dev_priv->dpio_lock);
-	vlv_dpio_write(dev_priv, DPIO_TX_OCALINIT(port), 0x00000000);
-	vlv_dpio_write(dev_priv, DPIO_TX_SWING_CTL4(port), demph_reg_value);
-	vlv_dpio_write(dev_priv, DPIO_TX_SWING_CTL2(port),
+	vlv_dpio_write(dev_priv, pipe, DPIO_TX_OCALINIT(port), 0x00000000);
+	vlv_dpio_write(dev_priv, pipe, DPIO_TX_SWING_CTL4(port), demph_reg_value);
+	vlv_dpio_write(dev_priv, pipe, DPIO_TX_SWING_CTL2(port),
 			 uniqtranscale_reg_value);
-	vlv_dpio_write(dev_priv, DPIO_TX_SWING_CTL3(port), 0x0C782040);
-	vlv_dpio_write(dev_priv, DPIO_PCS_STAGGER0(port), 0x00030000);
-	vlv_dpio_write(dev_priv, DPIO_PCS_CTL_OVER1(port), preemph_reg_value);
-	vlv_dpio_write(dev_priv, DPIO_TX_OCALINIT(port), 0x80000000);
+	vlv_dpio_write(dev_priv, pipe, DPIO_TX_SWING_CTL3(port), 0x0C782040);
+	vlv_dpio_write(dev_priv, pipe, DPIO_PCS_STAGGER0(port), 0x00030000);
+	vlv_dpio_write(dev_priv, pipe, DPIO_PCS_CTL_OVER1(port), preemph_reg_value);
+	vlv_dpio_write(dev_priv, pipe, DPIO_TX_OCALINIT(port), 0x80000000);
 	mutex_unlock(&dev_priv->dpio_lock);
 
 	return 0;
@@ -3144,24 +3246,26 @@ intel_dp_init_panel_power_sequencer(struct drm_device *dev,
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct edp_power_seq cur, vbt, spec, final;
 	u32 pp_on, pp_off, pp_div, pp;
-	int pp_control_reg, pp_on_reg, pp_off_reg, pp_div_reg;
+	int pp_ctrl_reg, pp_on_reg, pp_off_reg, pp_div_reg;
 
 	if (HAS_PCH_SPLIT(dev)) {
-		pp_control_reg = PCH_PP_CONTROL;
+		pp_ctrl_reg = PCH_PP_CONTROL;
 		pp_on_reg = PCH_PP_ON_DELAYS;
 		pp_off_reg = PCH_PP_OFF_DELAYS;
 		pp_div_reg = PCH_PP_DIVISOR;
 	} else {
-		pp_control_reg = PIPEA_PP_CONTROL;
-		pp_on_reg = PIPEA_PP_ON_DELAYS;
-		pp_off_reg = PIPEA_PP_OFF_DELAYS;
-		pp_div_reg = PIPEA_PP_DIVISOR;
+		enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);
+
+		pp_ctrl_reg = VLV_PIPE_PP_CONTROL(pipe);
+		pp_on_reg = VLV_PIPE_PP_ON_DELAYS(pipe);
+		pp_off_reg = VLV_PIPE_PP_OFF_DELAYS(pipe);
+		pp_div_reg = VLV_PIPE_PP_DIVISOR(pipe);
 	}
 
 	/* Workaround: Need to write PP_CONTROL with the unlock key as
 	 * the very first thing. */
 	pp = ironlake_get_pp_control(intel_dp);
-	I915_WRITE(pp_control_reg, pp);
+	I915_WRITE(pp_ctrl_reg, pp);
 
 	pp_on = I915_READ(pp_on_reg);
 	pp_off = I915_READ(pp_off_reg);
@@ -3249,9 +3353,11 @@ intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
 		pp_off_reg = PCH_PP_OFF_DELAYS;
 		pp_div_reg = PCH_PP_DIVISOR;
 	} else {
-		pp_on_reg = PIPEA_PP_ON_DELAYS;
-		pp_off_reg = PIPEA_PP_OFF_DELAYS;
-		pp_div_reg = PIPEA_PP_DIVISOR;
+		enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);
+
+		pp_on_reg = VLV_PIPE_PP_ON_DELAYS(pipe);
+		pp_off_reg = VLV_PIPE_PP_OFF_DELAYS(pipe);
+		pp_div_reg = VLV_PIPE_PP_DIVISOR(pipe);
 	}
 
 	/* And finally store the new values in the power sequencer. */
@@ -3268,12 +3374,15 @@ intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
 	/* Haswell doesn't have any port selection bits for the panel
 	 * power sequencer any more. */
 	if (IS_VALLEYVIEW(dev)) {
-		port_sel = I915_READ(pp_on_reg) & 0xc0000000;
+		if (dp_to_dig_port(intel_dp)->port == PORT_B)
+			port_sel = PANEL_PORT_SELECT_DPB_VLV;
+		else
+			port_sel = PANEL_PORT_SELECT_DPC_VLV;
 	} else if (HAS_PCH_IBX(dev) || HAS_PCH_CPT(dev)) {
 		if (dp_to_dig_port(intel_dp)->port == PORT_A)
-			port_sel = PANEL_POWER_PORT_DP_A;
+			port_sel = PANEL_PORT_SELECT_DPA;
 		else
-			port_sel = PANEL_POWER_PORT_DP_D;
+			port_sel = PANEL_PORT_SELECT_DPD;
 	}
 
 	pp_on |= port_sel;
@@ -3539,12 +3648,12 @@ intel_dp_init(struct drm_device *dev, int output_reg, enum port port)
 	intel_encoder->get_hw_state = intel_dp_get_hw_state;
 	intel_encoder->get_config = intel_dp_get_config;
 	if (IS_VALLEYVIEW(dev)) {
-		intel_encoder->pre_pll_enable = intel_dp_pre_pll_enable;
+		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
 		intel_encoder->pre_enable = vlv_pre_enable_dp;
 		intel_encoder->enable = vlv_enable_dp;
 	} else {
-		intel_encoder->pre_enable = intel_pre_enable_dp;
-		intel_encoder->enable = intel_enable_dp;
+		intel_encoder->pre_enable = g4x_pre_enable_dp;
+		intel_encoder->enable = g4x_enable_dp;
 	}
 
 	intel_dig_port->port = port;