Diffstat (limited to 'drivers/gpu/drm/i915/intel_dp.c')
-rw-r--r--  drivers/gpu/drm/i915/intel_dp.c  546
1 file changed, 351 insertions(+), 195 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_dp.c b/drivers/gpu/drm/i915/intel_dp.c
index 09b318b0227f..92b041b66e49 100644
--- a/drivers/gpu/drm/i915/intel_dp.c
+++ b/drivers/gpu/drm/i915/intel_dp.c
@@ -59,7 +59,6 @@ struct intel_dp {
 	struct i2c_algo_dp_aux_data algo;
 	bool is_pch_edp;
 	uint8_t train_set[4];
-	uint8_t link_status[DP_LINK_STATUS_SIZE];
 	int panel_power_up_delay;
 	int panel_power_down_delay;
 	int panel_power_cycle_delay;
@@ -68,7 +67,6 @@ struct intel_dp {
 	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
 	struct delayed_work panel_vdd_work;
 	bool want_panel_vdd;
-	unsigned long panel_off_jiffies;
 };
 
 /**
@@ -157,16 +155,12 @@ intel_edp_link_config(struct intel_encoder *intel_encoder,
 static int
 intel_dp_max_lane_count(struct intel_dp *intel_dp)
 {
-	int max_lane_count = 4;
-
-	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
-		max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
-		switch (max_lane_count) {
-		case 1: case 2: case 4:
-			break;
-		default:
-			max_lane_count = 4;
-		}
+	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
+	switch (max_lane_count) {
+	case 1: case 2: case 4:
+		break;
+	default:
+		max_lane_count = 4;
 	}
 	return max_lane_count;
 }
@@ -214,13 +208,15 @@ intel_dp_link_clock(uint8_t link_bw)
  */
 
 static int
-intel_dp_link_required(struct intel_dp *intel_dp, int pixel_clock)
+intel_dp_link_required(struct intel_dp *intel_dp, int pixel_clock, int check_bpp)
 {
 	struct drm_crtc *crtc = intel_dp->base.base.crtc;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 	int bpp = 24;
 
-	if (intel_crtc)
+	if (check_bpp)
+		bpp = check_bpp;
+	else if (intel_crtc)
 		bpp = intel_crtc->bpp;
 
 	return (pixel_clock * bpp + 9) / 10;
@@ -239,6 +235,7 @@ intel_dp_mode_valid(struct drm_connector *connector,
 	struct intel_dp *intel_dp = intel_attached_dp(connector);
 	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
 	int max_lanes = intel_dp_max_lane_count(intel_dp);
+	int max_rate, mode_rate;
 
 	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
 		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
@@ -248,9 +245,17 @@ intel_dp_mode_valid(struct drm_connector *connector,
 			return MODE_PANEL;
 	}
 
-	if (intel_dp_link_required(intel_dp, mode->clock)
-	    > intel_dp_max_data_rate(max_link_clock, max_lanes))
-		return MODE_CLOCK_HIGH;
+	mode_rate = intel_dp_link_required(intel_dp, mode->clock, 0);
+	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
+
+	if (mode_rate > max_rate) {
+		mode_rate = intel_dp_link_required(intel_dp,
+						   mode->clock, 18);
+		if (mode_rate > max_rate)
+			return MODE_CLOCK_HIGH;
+		else
+			mode->private_flags |= INTEL_MODE_DP_FORCE_6BPC;
+	}
 
 	if (mode->clock < 10000)
 		return MODE_CLOCK_LOW;
@@ -368,8 +373,8 @@ intel_dp_aux_ch(struct intel_dp *intel_dp,
 	 * clock divider.
 	 */
 	if (is_cpu_edp(intel_dp)) {
-		if (IS_GEN6(dev))
-			aux_clock_divider = 200; /* SNB eDP input clock at 400Mhz */
+		if (IS_GEN6(dev) || IS_GEN7(dev))
+			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
 		else
 			aux_clock_divider = 225; /* eDP input clock at 450Mhz */
 	} else if (HAS_PCH_SPLIT(dev))
@@ -678,6 +683,7 @@ intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
 	int lane_count, clock;
 	int max_lane_count = intel_dp_max_lane_count(intel_dp);
 	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
+	int bpp = mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 0;
 	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
 
 	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
@@ -695,7 +701,7 @@ intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
 		for (clock = 0; clock <= max_clock; clock++) {
 			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);
 
-			if (intel_dp_link_required(intel_dp, mode->clock)
+			if (intel_dp_link_required(intel_dp, mode->clock, bpp)
 			    <= link_avail) {
 				intel_dp->link_bw = bws[clock];
 				intel_dp->lane_count = lane_count;
@@ -768,12 +774,11 @@ intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
 			continue;
 
 		intel_dp = enc_to_intel_dp(encoder);
-		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT) {
+		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
+		    intel_dp->base.type == INTEL_OUTPUT_EDP)
+		{
 			lane_count = intel_dp->lane_count;
 			break;
-		} else if (is_edp(intel_dp)) {
-			lane_count = dev_priv->edp.lanes;
-			break;
 		}
 	}
 
@@ -810,6 +815,7 @@ intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
 		  struct drm_display_mode *adjusted_mode)
 {
 	struct drm_device *dev = encoder->dev;
+	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
 	struct drm_crtc *crtc = intel_dp->base.base.crtc;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
@@ -822,18 +828,32 @@ intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
 		ironlake_edp_pll_off(encoder);
 	}
 
-	intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
-	intel_dp->DP |= intel_dp->color_range;
+	/*
+	 * There are four kinds of DP registers:
+	 *
+	 *	IBX PCH
+	 *	SNB CPU
+	 *	IVB CPU
+	 *	CPT PCH
+	 *
+	 * IBX PCH and CPU are the same for almost everything,
+	 * except that the CPU DP PLL is configured in this
+	 * register
+	 *
+	 * CPT PCH is quite different, having many bits moved
+	 * to the TRANS_DP_CTL register instead. That
+	 * configuration happens (oddly) in ironlake_pch_enable
+	 */
+
+	/* Preserve the BIOS-computed detected bit. This is
+	 * supposed to be read-only.
+	 */
+	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
+	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
 
-	if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
-		intel_dp->DP |= DP_SYNC_HS_HIGH;
-	if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
-		intel_dp->DP |= DP_SYNC_VS_HIGH;
+	/* Handle DP bits in common between all three register formats */
 
-	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
-		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
-	else
-		intel_dp->DP |= DP_LINK_TRAIN_OFF;
+	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
 
 	switch (intel_dp->lane_count) {
 	case 1:
@@ -852,59 +872,124 @@ intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
 		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
 		intel_write_eld(encoder, adjusted_mode);
 	}
-
 	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
 	intel_dp->link_configuration[0] = intel_dp->link_bw;
 	intel_dp->link_configuration[1] = intel_dp->lane_count;
 	intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
-
 	/*
 	 * Check for DPCD version > 1.1 and enhanced framing support
 	 */
 	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
 	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
 		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
-		intel_dp->DP |= DP_ENHANCED_FRAMING;
 	}
 
-	/* CPT DP's pipe select is decided in TRANS_DP_CTL */
-	if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev))
-		intel_dp->DP |= DP_PIPEB_SELECT;
+	/* Split out the IBX/CPU vs CPT settings */
+
+	if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
+		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
+			intel_dp->DP |= DP_SYNC_HS_HIGH;
+		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
+			intel_dp->DP |= DP_SYNC_VS_HIGH;
+		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
+
+		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
+			intel_dp->DP |= DP_ENHANCED_FRAMING;
+
+		intel_dp->DP |= intel_crtc->pipe << 29;
 
-	if (is_cpu_edp(intel_dp)) {
 		/* don't miss out required setting for eDP */
 		intel_dp->DP |= DP_PLL_ENABLE;
 		if (adjusted_mode->clock < 200000)
 			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
 		else
 			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
+	} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
+		intel_dp->DP |= intel_dp->color_range;
+
+		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
+			intel_dp->DP |= DP_SYNC_HS_HIGH;
+		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
+			intel_dp->DP |= DP_SYNC_VS_HIGH;
+		intel_dp->DP |= DP_LINK_TRAIN_OFF;
+
+		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
+			intel_dp->DP |= DP_ENHANCED_FRAMING;
+
+		if (intel_crtc->pipe == 1)
+			intel_dp->DP |= DP_PIPEB_SELECT;
+
+		if (is_cpu_edp(intel_dp)) {
+			/* don't miss out required setting for eDP */
+			intel_dp->DP |= DP_PLL_ENABLE;
+			if (adjusted_mode->clock < 200000)
+				intel_dp->DP |= DP_PLL_FREQ_160MHZ;
+			else
+				intel_dp->DP |= DP_PLL_FREQ_270MHZ;
+		}
+	} else {
+		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
 	}
 }
 
-static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
+#define IDLE_ON_MASK	(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
+#define IDLE_ON_VALUE	(PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)
+
+#define IDLE_OFF_MASK	(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
+#define IDLE_OFF_VALUE	(0     | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
+
+#define IDLE_CYCLE_MASK	(PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
+#define IDLE_CYCLE_VALUE	(0     | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
+
+static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
+				       u32 mask,
+				       u32 value)
 {
-	unsigned long off_time;
-	unsigned long delay;
+	struct drm_device *dev = intel_dp->base.base.dev;
+	struct drm_i915_private *dev_priv = dev->dev_private;
 
-	DRM_DEBUG_KMS("Wait for panel power off time\n");
+	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
+		      mask, value,
+		      I915_READ(PCH_PP_STATUS),
+		      I915_READ(PCH_PP_CONTROL));
 
-	if (ironlake_edp_have_panel_power(intel_dp) ||
-	    ironlake_edp_have_panel_vdd(intel_dp))
-	{
-		DRM_DEBUG_KMS("Panel still on, no delay needed\n");
-		return;
+	if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) {
+		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
+			  I915_READ(PCH_PP_STATUS),
+			  I915_READ(PCH_PP_CONTROL));
 	}
+}
 
-	off_time = intel_dp->panel_off_jiffies + msecs_to_jiffies(intel_dp->panel_power_down_delay);
-	if (time_after(jiffies, off_time)) {
-		DRM_DEBUG_KMS("Time already passed");
-		return;
-	}
-	delay = jiffies_to_msecs(off_time - jiffies);
-	if (delay > intel_dp->panel_power_down_delay)
-		delay = intel_dp->panel_power_down_delay;
-	DRM_DEBUG_KMS("Waiting an additional %ld ms\n", delay);
-	msleep(delay);
+static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
+{
+	DRM_DEBUG_KMS("Wait for panel power on\n");
+	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
+}
+
+static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
+{
+	DRM_DEBUG_KMS("Wait for panel power off time\n");
+	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
+}
+
+static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
+{
+	DRM_DEBUG_KMS("Wait for panel power cycle\n");
+	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
+}
+
+
+/* Read the current pp_control value, unlocking the register if it
+ * is locked
+ */
+
+static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
+{
+	u32 control = I915_READ(PCH_PP_CONTROL);
+
+	control &= ~PANEL_UNLOCK_MASK;
+	control |= PANEL_UNLOCK_REGS;
+	return control;
 }
 
 static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
@@ -921,15 +1006,16 @@ static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
 	     "eDP VDD already requested on\n");
 
 	intel_dp->want_panel_vdd = true;
+
 	if (ironlake_edp_have_panel_vdd(intel_dp)) {
 		DRM_DEBUG_KMS("eDP VDD already on\n");
 		return;
 	}
 
-	ironlake_wait_panel_off(intel_dp);
-	pp = I915_READ(PCH_PP_CONTROL);
-	pp &= ~PANEL_UNLOCK_MASK;
-	pp |= PANEL_UNLOCK_REGS;
+	if (!ironlake_edp_have_panel_power(intel_dp))
+		ironlake_wait_panel_power_cycle(intel_dp);
+
+	pp = ironlake_get_pp_control(dev_priv);
 	pp |= EDP_FORCE_VDD;
 	I915_WRITE(PCH_PP_CONTROL, pp);
 	POSTING_READ(PCH_PP_CONTROL);
@@ -952,9 +1038,7 @@ static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
 	u32 pp;
 
 	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
-		pp = I915_READ(PCH_PP_CONTROL);
-		pp &= ~PANEL_UNLOCK_MASK;
-		pp |= PANEL_UNLOCK_REGS;
+		pp = ironlake_get_pp_control(dev_priv);
 		pp &= ~EDP_FORCE_VDD;
 		I915_WRITE(PCH_PP_CONTROL, pp);
 		POSTING_READ(PCH_PP_CONTROL);
@@ -962,7 +1046,8 @@ static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
 		/* Make sure sequencer is idle before allowing subsequent activity */
 		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
 			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));
-		intel_dp->panel_off_jiffies = jiffies;
+
+		msleep(intel_dp->panel_power_down_delay);
 	}
 }
 
@@ -972,9 +1057,9 @@ static void ironlake_panel_vdd_work(struct work_struct *__work)
 						  struct intel_dp, panel_vdd_work);
 	struct drm_device *dev = intel_dp->base.base.dev;
 
-	mutex_lock(&dev->struct_mutex);
+	mutex_lock(&dev->mode_config.mutex);
 	ironlake_panel_vdd_off_sync(intel_dp);
-	mutex_unlock(&dev->struct_mutex);
+	mutex_unlock(&dev->mode_config.mutex);
 }
 
 static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
@@ -984,7 +1069,7 @@ static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
 
 	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
 	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");
-	
+
 	intel_dp->want_panel_vdd = false;
 
 	if (sync) {
@@ -1000,23 +1085,25 @@ static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
 	}
 }
 
-/* Returns true if the panel was already on when called */
 static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 {
 	struct drm_device *dev = intel_dp->base.base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_STATE_ON_IDLE;
+	u32 pp;
 
 	if (!is_edp(intel_dp))
 		return;
-	if (ironlake_edp_have_panel_power(intel_dp))
+
+	DRM_DEBUG_KMS("Turn eDP power on\n");
+
+	if (ironlake_edp_have_panel_power(intel_dp)) {
+		DRM_DEBUG_KMS("eDP power already on\n");
 		return;
+	}
 
-	ironlake_wait_panel_off(intel_dp);
-	pp = I915_READ(PCH_PP_CONTROL);
-	pp &= ~PANEL_UNLOCK_MASK;
-	pp |= PANEL_UNLOCK_REGS;
+	ironlake_wait_panel_power_cycle(intel_dp);
 
+	pp = ironlake_get_pp_control(dev_priv);
 	if (IS_GEN5(dev)) {
 		/* ILK workaround: disable reset around power sequence */
 		pp &= ~PANEL_POWER_RESET;
@@ -1025,13 +1112,13 @@ static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 	}
 
 	pp |= POWER_TARGET_ON;
+	if (!IS_GEN5(dev))
+		pp |= PANEL_POWER_RESET;
+
 	I915_WRITE(PCH_PP_CONTROL, pp);
 	POSTING_READ(PCH_PP_CONTROL);
 
-	if (wait_for((I915_READ(PCH_PP_STATUS) & idle_on_mask) == idle_on_mask,
-		     5000))
-		DRM_ERROR("panel on wait timed out: 0x%08x\n",
-			  I915_READ(PCH_PP_STATUS));
+	ironlake_wait_panel_on(intel_dp);
 
 	if (IS_GEN5(dev)) {
 		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
@@ -1040,46 +1127,25 @@ static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 	}
 }
 
-static void ironlake_edp_panel_off(struct drm_encoder *encoder)
+static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
 {
-	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
-	struct drm_device *dev = encoder->dev;
+	struct drm_device *dev = intel_dp->base.base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	u32 pp, idle_off_mask = PP_ON | PP_SEQUENCE_MASK |
-		PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK;
+	u32 pp;
 
 	if (!is_edp(intel_dp))
 		return;
-	pp = I915_READ(PCH_PP_CONTROL);
-	pp &= ~PANEL_UNLOCK_MASK;
-	pp |= PANEL_UNLOCK_REGS;
 
-	if (IS_GEN5(dev)) {
-		/* ILK workaround: disable reset around power sequence */
-		pp &= ~PANEL_POWER_RESET;
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-	}
+	DRM_DEBUG_KMS("Turn eDP power off\n");
 
-	intel_dp->panel_off_jiffies = jiffies;
+	WARN(intel_dp->want_panel_vdd, "Cannot turn power off while VDD is on\n");
 
-	if (IS_GEN5(dev)) {
-		pp &= ~POWER_TARGET_ON;
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-		pp &= ~POWER_TARGET_ON;
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-		msleep(intel_dp->panel_power_cycle_delay);
-
-		if (wait_for((I915_READ(PCH_PP_STATUS) & idle_off_mask) == 0, 5000))
-			DRM_ERROR("panel off wait timed out: 0x%08x\n",
-				  I915_READ(PCH_PP_STATUS));
+	pp = ironlake_get_pp_control(dev_priv);
+	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
+	I915_WRITE(PCH_PP_CONTROL, pp);
+	POSTING_READ(PCH_PP_CONTROL);
 
-		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-	}
+	ironlake_wait_panel_off(intel_dp);
 }
 
 static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
@@ -1099,9 +1165,7 @@ static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
 	 * allowing it to appear.
 	 */
 	msleep(intel_dp->backlight_on_delay);
-	pp = I915_READ(PCH_PP_CONTROL);
-	pp &= ~PANEL_UNLOCK_MASK;
-	pp |= PANEL_UNLOCK_REGS;
+	pp = ironlake_get_pp_control(dev_priv);
 	pp |= EDP_BLC_ENABLE;
 	I915_WRITE(PCH_PP_CONTROL, pp);
 	POSTING_READ(PCH_PP_CONTROL);
@@ -1117,9 +1181,7 @@ static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
 		return;
 
 	DRM_DEBUG_KMS("\n");
-	pp = I915_READ(PCH_PP_CONTROL);
-	pp &= ~PANEL_UNLOCK_MASK;
-	pp |= PANEL_UNLOCK_REGS;
+	pp = ironlake_get_pp_control(dev_priv);
 	pp &= ~EDP_BLC_ENABLE;
 	I915_WRITE(PCH_PP_CONTROL, pp);
 	POSTING_READ(PCH_PP_CONTROL);
@@ -1187,17 +1249,18 @@ static void intel_dp_prepare(struct drm_encoder *encoder)
 {
 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
 
+	ironlake_edp_backlight_off(intel_dp);
+	ironlake_edp_panel_off(intel_dp);
+
 	/* Wake up the sink first */
 	ironlake_edp_panel_vdd_on(intel_dp);
 	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
+	intel_dp_link_down(intel_dp);
 	ironlake_edp_panel_vdd_off(intel_dp, false);
 
 	/* Make sure the panel is off before trying to
 	 * change the mode
 	 */
-	ironlake_edp_backlight_off(intel_dp);
-	intel_dp_link_down(intel_dp);
-	ironlake_edp_panel_off(encoder);
 }
 
 static void intel_dp_commit(struct drm_encoder *encoder)
@@ -1211,7 +1274,6 @@ static void intel_dp_commit(struct drm_encoder *encoder)
 	intel_dp_start_link_train(intel_dp);
 	ironlake_edp_panel_on(intel_dp);
 	ironlake_edp_panel_vdd_off(intel_dp, true);
-
 	intel_dp_complete_link_train(intel_dp);
 	ironlake_edp_backlight_on(intel_dp);
 
@@ -1230,16 +1292,20 @@ intel_dp_dpms(struct drm_encoder *encoder, int mode)
 	uint32_t dp_reg = I915_READ(intel_dp->output_reg);
 
 	if (mode != DRM_MODE_DPMS_ON) {
+		ironlake_edp_backlight_off(intel_dp);
+		ironlake_edp_panel_off(intel_dp);
+
 		ironlake_edp_panel_vdd_on(intel_dp);
-		if (is_edp(intel_dp))
-			ironlake_edp_backlight_off(intel_dp);
 		intel_dp_sink_dpms(intel_dp, mode);
 		intel_dp_link_down(intel_dp);
-		ironlake_edp_panel_off(encoder);
-		if (is_edp(intel_dp) && !is_pch_edp(intel_dp))
-			ironlake_edp_pll_off(encoder);
 		ironlake_edp_panel_vdd_off(intel_dp, false);
+
+		if (is_cpu_edp(intel_dp))
+			ironlake_edp_pll_off(encoder);
 	} else {
+		if (is_cpu_edp(intel_dp))
+			ironlake_edp_pll_on(encoder);
+
 		ironlake_edp_panel_vdd_on(intel_dp);
 		intel_dp_sink_dpms(intel_dp, mode);
 		if (!(dp_reg & DP_PORT_EN)) {
@@ -1247,7 +1313,6 @@ intel_dp_dpms(struct drm_encoder *encoder, int mode)
 			ironlake_edp_panel_on(intel_dp);
 			ironlake_edp_panel_vdd_off(intel_dp, true);
 			intel_dp_complete_link_train(intel_dp);
-			ironlake_edp_backlight_on(intel_dp);
 		} else
 			ironlake_edp_panel_vdd_off(intel_dp, false);
 		ironlake_edp_backlight_on(intel_dp);
@@ -1285,11 +1350,11 @@ intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
  * link status information
  */
 static bool
-intel_dp_get_link_status(struct intel_dp *intel_dp)
+intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
 {
 	return intel_dp_aux_native_read_retry(intel_dp,
 					      DP_LANE0_1_STATUS,
-					      intel_dp->link_status,
+					      link_status,
 					      DP_LINK_STATUS_SIZE);
 }
 
@@ -1301,27 +1366,25 @@ intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
 }
 
 static uint8_t
-intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
+intel_get_adjust_request_voltage(uint8_t adjust_request[2],
 				 int lane)
 {
-	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
 	int s = ((lane & 1) ?
 		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
 		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
-	uint8_t l = intel_dp_link_status(link_status, i);
+	uint8_t l = adjust_request[lane>>1];
 
 	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
 }
 
 static uint8_t
-intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
+intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
 				      int lane)
 {
-	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
 	int s = ((lane & 1) ?
 		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
 		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
-	uint8_t l = intel_dp_link_status(link_status, i);
+	uint8_t l = adjust_request[lane>>1];
 
 	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
 }
@@ -1343,34 +1406,63 @@ static char *link_train_names[] = {
  * These are source-specific values; current Intel hardware supports
  * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
  */
-#define I830_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_800
 
 static uint8_t
-intel_dp_pre_emphasis_max(uint8_t voltage_swing)
+intel_dp_voltage_max(struct intel_dp *intel_dp)
 {
-	switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
-	case DP_TRAIN_VOLTAGE_SWING_400:
-		return DP_TRAIN_PRE_EMPHASIS_6;
-	case DP_TRAIN_VOLTAGE_SWING_600:
-		return DP_TRAIN_PRE_EMPHASIS_6;
-	case DP_TRAIN_VOLTAGE_SWING_800:
-		return DP_TRAIN_PRE_EMPHASIS_3_5;
-	case DP_TRAIN_VOLTAGE_SWING_1200:
-	default:
-		return DP_TRAIN_PRE_EMPHASIS_0;
+	struct drm_device *dev = intel_dp->base.base.dev;
+
+	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
+		return DP_TRAIN_VOLTAGE_SWING_800;
+	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
+		return DP_TRAIN_VOLTAGE_SWING_1200;
+	else
+		return DP_TRAIN_VOLTAGE_SWING_800;
+}
+
+static uint8_t
+intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
+{
+	struct drm_device *dev = intel_dp->base.base.dev;
+
+	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
+		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
+		case DP_TRAIN_VOLTAGE_SWING_400:
+			return DP_TRAIN_PRE_EMPHASIS_6;
+		case DP_TRAIN_VOLTAGE_SWING_600:
+		case DP_TRAIN_VOLTAGE_SWING_800:
+			return DP_TRAIN_PRE_EMPHASIS_3_5;
+		default:
+			return DP_TRAIN_PRE_EMPHASIS_0;
+		}
+	} else {
+		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
+		case DP_TRAIN_VOLTAGE_SWING_400:
+			return DP_TRAIN_PRE_EMPHASIS_6;
+		case DP_TRAIN_VOLTAGE_SWING_600:
+			return DP_TRAIN_PRE_EMPHASIS_6;
+		case DP_TRAIN_VOLTAGE_SWING_800:
+			return DP_TRAIN_PRE_EMPHASIS_3_5;
+		case DP_TRAIN_VOLTAGE_SWING_1200:
+		default:
+			return DP_TRAIN_PRE_EMPHASIS_0;
+		}
 	}
 }
 
 static void
-intel_get_adjust_train(struct intel_dp *intel_dp)
+intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
 {
 	uint8_t v = 0;
 	uint8_t p = 0;
 	int lane;
+	uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
+	uint8_t voltage_max;
+	uint8_t preemph_max;
 
 	for (lane = 0; lane < intel_dp->lane_count; lane++) {
-		uint8_t this_v = intel_get_adjust_request_voltage(intel_dp->link_status, lane);
-		uint8_t this_p = intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
+		uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
+		uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);
 
 		if (this_v > v)
 			v = this_v;
@@ -1378,18 +1470,20 @@ intel_get_adjust_train(struct intel_dp *intel_dp)
 			p = this_p;
 	}
 
-	if (v >= I830_DP_VOLTAGE_MAX)
-		v = I830_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
+	voltage_max = intel_dp_voltage_max(intel_dp);
+	if (v >= voltage_max)
+		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
 
-	if (p >= intel_dp_pre_emphasis_max(v))
-		p = intel_dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
+	preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
+	if (p >= preemph_max)
+		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
 
 	for (lane = 0; lane < 4; lane++)
 		intel_dp->train_set[lane] = v | p;
 }
 
 static uint32_t
-intel_dp_signal_levels(uint8_t train_set, int lane_count)
+intel_dp_signal_levels(uint8_t train_set)
 {
 	uint32_t signal_levels = 0;
 
@@ -1454,13 +1548,43 @@ intel_gen6_edp_signal_levels(uint8_t train_set)
 	}
 }
 
+/* Gen7's DP voltage swing and pre-emphasis control */
+static uint32_t
+intel_gen7_edp_signal_levels(uint8_t train_set)
+{
+	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
+					 DP_TRAIN_PRE_EMPHASIS_MASK);
+	switch (signal_levels) {
+	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
+		return EDP_LINK_TRAIN_400MV_0DB_IVB;
+	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
+		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
+	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
+		return EDP_LINK_TRAIN_400MV_6DB_IVB;
+
+	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
+		return EDP_LINK_TRAIN_600MV_0DB_IVB;
+	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
+		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
+
+	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
+		return EDP_LINK_TRAIN_800MV_0DB_IVB;
+	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
+		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
+
+	default:
+		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
+			      "0x%x\n", signal_levels);
+		return EDP_LINK_TRAIN_500MV_0DB_IVB;
+	}
+}
+
 static uint8_t
 intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
 		      int lane)
 {
-	int i = DP_LANE0_1_STATUS + (lane >> 1);
 	int s = (lane & 1) * 4;
-	uint8_t l = intel_dp_link_status(link_status, i);
+	uint8_t l = link_status[lane>>1];
 
 	return (l >> s) & 0xf;
 }
@@ -1485,18 +1609,18 @@ intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count
 			 DP_LANE_CHANNEL_EQ_DONE|\
 			 DP_LANE_SYMBOL_LOCKED)
 static bool
-intel_channel_eq_ok(struct intel_dp *intel_dp)
+intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
 {
 	uint8_t lane_align;
 	uint8_t lane_status;
 	int lane;
 
-	lane_align = intel_dp_link_status(intel_dp->link_status,
+	lane_align = intel_dp_link_status(link_status,
 					  DP_LANE_ALIGN_STATUS_UPDATED);
 	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
 		return false;
 	for (lane = 0; lane < intel_dp->lane_count; lane++) {
-		lane_status = intel_get_lane_status(intel_dp->link_status, lane);
+		lane_status = intel_get_lane_status(link_status, lane);
 		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
 			return false;
 	}
@@ -1521,8 +1645,9 @@ intel_dp_set_link_train(struct intel_dp *intel_dp,
 
 	ret = intel_dp_aux_native_write(intel_dp,
 					DP_TRAINING_LANE0_SET,
-					intel_dp->train_set, 4);
-	if (ret != 4)
+					intel_dp->train_set,
+					intel_dp->lane_count);
+	if (ret != intel_dp->lane_count)
 		return false;
 
 	return true;
@@ -1538,7 +1663,7 @@ intel_dp_start_link_train(struct intel_dp *intel_dp)
 	int i;
 	uint8_t voltage;
 	bool clock_recovery = false;
-	int tries;
+	int voltage_tries, loop_tries;
 	u32 reg;
 	uint32_t DP = intel_dp->DP;
 
@@ -1559,26 +1684,35 @@ intel_dp_start_link_train(struct intel_dp *intel_dp)
 			    DP_LINK_CONFIGURATION_SIZE);
 
 	DP |= DP_PORT_EN;
-	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
+
+	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
 		DP &= ~DP_LINK_TRAIN_MASK_CPT;
 	else
 		DP &= ~DP_LINK_TRAIN_MASK;
 	memset(intel_dp->train_set, 0, 4);
 	voltage = 0xff;
-	tries = 0;
+	voltage_tries = 0;
+	loop_tries = 0;
 	clock_recovery = false;
 	for (;;) {
 		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
+		uint8_t link_status[DP_LINK_STATUS_SIZE];
 		uint32_t signal_levels;
-		if (IS_GEN6(dev) && is_edp(intel_dp)) {
+
+
+		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
+			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
+			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
+		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
 			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
 			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
 		} else {
-			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
+			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
+			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
 			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
 		}
 
-		if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
+		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
 			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
 		else
 			reg = DP | DP_LINK_TRAIN_PAT_1;
@@ -1590,10 +1724,13 @@ intel_dp_start_link_train(struct intel_dp *intel_dp)
 		/* Set training pattern 1 */
 
 		udelay(100);
-		if (!intel_dp_get_link_status(intel_dp))
+		if (!intel_dp_get_link_status(intel_dp, link_status)) {
+			DRM_ERROR("failed to get link status\n");
 			break;
+		}
 
-		if (intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
+		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
+			DRM_DEBUG_KMS("clock recovery OK\n");
 			clock_recovery = true;
 			break;
 		}
@@ -1602,20 +1739,30 @@ intel_dp_start_link_train(struct intel_dp *intel_dp)
 		for (i = 0; i < intel_dp->lane_count; i++)
 			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
 				break;
-		if (i == intel_dp->lane_count)
-			break;
+		if (i == intel_dp->lane_count) {
+			++loop_tries;
+			if (loop_tries == 5) {
+				DRM_DEBUG_KMS("too many full retries, give up\n");
+				break;
+			}
+			memset(intel_dp->train_set, 0, 4);
+			voltage_tries = 0;
+			continue;
+		}
 
 		/* Check to see if we've tried the same voltage 5 times */
 		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
-			++tries;
-			if (tries == 5)
+			++voltage_tries;
+			if (voltage_tries == 5) {
+				DRM_DEBUG_KMS("too many voltage retries, give up\n");
 				break;
+			}
 		} else
-			tries = 0;
+			voltage_tries = 0;
 		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
 
 		/* Compute new intel_dp->train_set as requested by target */
-		intel_get_adjust_train(intel_dp);
+		intel_get_adjust_train(intel_dp, link_status);
 	}
 
 	intel_dp->DP = DP;
@@ -1638,6 +1785,7 @@ intel_dp_complete_link_train(struct intel_dp *intel_dp)
 	for (;;) {
 		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
 		uint32_t signal_levels;
+		uint8_t link_status[DP_LINK_STATUS_SIZE];
 
 		if (cr_tries > 5) {
 			DRM_ERROR("failed to train DP, aborting\n");
@@ -1645,15 +1793,18 @@ intel_dp_complete_link_train(struct intel_dp *intel_dp)
 			break;
 		}
 
-		if (IS_GEN6(dev) && is_edp(intel_dp)) {
+		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
+			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
+			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
+		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
 			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
 			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
 		} else {
-			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
+			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
 			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
 		}
 
-		if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
+		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
 			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
 		else
 			reg = DP | DP_LINK_TRAIN_PAT_2;
@@ -1665,17 +1816,17 @@ intel_dp_complete_link_train(struct intel_dp *intel_dp)
 			break;
 
 		udelay(400);
-		if (!intel_dp_get_link_status(intel_dp))
+		if (!intel_dp_get_link_status(intel_dp, link_status))
 			break;
 
 		/* Make sure clock is still ok */
-		if (!intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
+		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
 			intel_dp_start_link_train(intel_dp);
 			cr_tries++;
 			continue;
 		}
 
-		if (intel_channel_eq_ok(intel_dp)) {
+		if (intel_channel_eq_ok(intel_dp, link_status)) {
 			channel_eq = true;
 			break;
 		}
@@ -1690,11 +1841,11 @@ intel_dp_complete_link_train(struct intel_dp *intel_dp)
 		}
 
 		/* Compute new intel_dp->train_set as requested by target */
-		intel_get_adjust_train(intel_dp);
+		intel_get_adjust_train(intel_dp, link_status);
 		++tries;
 	}
 
-	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
+	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
 		reg = DP | DP_LINK_TRAIN_OFF_CPT;
 	else
 		reg = DP | DP_LINK_TRAIN_OFF;
@@ -1724,7 +1875,7 @@ intel_dp_link_down(struct intel_dp *intel_dp)
 		udelay(100);
 	}
 
-	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp)) {
+	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
 		DP &= ~DP_LINK_TRAIN_MASK_CPT;
 		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
 	} else {
@@ -1735,8 +1886,12 @@ intel_dp_link_down(struct intel_dp *intel_dp)
 
 	msleep(17);
 
-	if (is_edp(intel_dp))
-		DP |= DP_LINK_TRAIN_OFF;
+	if (is_edp(intel_dp)) {
+		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
+			DP |= DP_LINK_TRAIN_OFF_CPT;
+		else
+			DP |= DP_LINK_TRAIN_OFF;
+	}
 
 	if (!HAS_PCH_CPT(dev) &&
 	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
@@ -1822,6 +1977,7 @@ static void
 intel_dp_check_link_status(struct intel_dp *intel_dp)
 {
 	u8 sink_irq_vector;
+	u8 link_status[DP_LINK_STATUS_SIZE];
 
 	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
 		return;
@@ -1830,7 +1986,7 @@ intel_dp_check_link_status(struct intel_dp *intel_dp)
 		return;
 
 	/* Try to read receiver status if the link appears to be up */
-	if (!intel_dp_get_link_status(intel_dp)) {
+	if (!intel_dp_get_link_status(intel_dp, link_status)) {
 		intel_dp_link_down(intel_dp);
 		return;
 	}
@@ -1855,7 +2011,7 @@ intel_dp_check_link_status(struct intel_dp *intel_dp)
 			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
 	}
 
-	if (!intel_channel_eq_ok(intel_dp)) {
+	if (!intel_channel_eq_ok(intel_dp, link_status)) {
 		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
 			      drm_get_encoder_name(&intel_dp->base.base));
 		intel_dp_start_link_train(intel_dp);
@@ -2179,7 +2335,8 @@ intel_trans_dp_port_sel(struct drm_crtc *crtc)
 			continue;
 
 		intel_dp = enc_to_intel_dp(encoder);
-		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
+		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
+		    intel_dp->base.type == INTEL_OUTPUT_EDP)
 			return intel_dp->output_reg;
 	}
 
@@ -2321,7 +2478,7 @@ intel_dp_init(struct drm_device *dev, int output_reg)
 
 	cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
 		PANEL_LIGHT_ON_DELAY_SHIFT;
-	
+
 	cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
 		PANEL_LIGHT_OFF_DELAY_SHIFT;
 
@@ -2354,11 +2511,10 @@ intel_dp_init(struct drm_device *dev, int output_reg)
 	DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
 		      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
 
-	intel_dp->panel_off_jiffies = jiffies - intel_dp->panel_power_down_delay;
-
 	ironlake_edp_panel_vdd_on(intel_dp);
 	ret = intel_dp_get_dpcd(intel_dp);
 	ironlake_edp_panel_vdd_off(intel_dp, false);
+
 	if (ret) {
 		if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
 			dev_priv->no_aux_handshake =