Diffstat (limited to 'drivers')
-rw-r--r--  drivers/gpu/drm/i915/i915_reg.h   17
-rw-r--r--  drivers/gpu/drm/i915/intel_dp.c  137
2 files changed, 85 insertions, 69 deletions
diff --git a/drivers/gpu/drm/i915/i915_reg.h b/drivers/gpu/drm/i915/i915_reg.h
index a34e86630f26..b080cc824001 100644
--- a/drivers/gpu/drm/i915/i915_reg.h
+++ b/drivers/gpu/drm/i915/i915_reg.h
@@ -1553,12 +1553,21 @@
  */
 #define PP_READY			(1 << 30)
 #define PP_SEQUENCE_NONE		(0 << 28)
-#define PP_SEQUENCE_ON			(1 << 28)
-#define PP_SEQUENCE_OFF			(2 << 28)
-#define PP_SEQUENCE_MASK		0x30000000
+#define PP_SEQUENCE_POWER_UP		(1 << 28)
+#define PP_SEQUENCE_POWER_DOWN		(2 << 28)
+#define PP_SEQUENCE_MASK		(3 << 28)
+#define PP_SEQUENCE_SHIFT		28
 #define PP_CYCLE_DELAY_ACTIVE		(1 << 27)
-#define PP_SEQUENCE_STATE_ON_IDLE	(1 << 3)
 #define PP_SEQUENCE_STATE_MASK		0x0000000f
+#define PP_SEQUENCE_STATE_OFF_IDLE	(0x0 << 0)
+#define PP_SEQUENCE_STATE_OFF_S0_1	(0x1 << 0)
+#define PP_SEQUENCE_STATE_OFF_S0_2	(0x2 << 0)
+#define PP_SEQUENCE_STATE_OFF_S0_3	(0x3 << 0)
+#define PP_SEQUENCE_STATE_ON_IDLE	(0x8 << 0)
+#define PP_SEQUENCE_STATE_ON_S1_0	(0x9 << 0)
+#define PP_SEQUENCE_STATE_ON_S1_2	(0xa << 0)
+#define PP_SEQUENCE_STATE_ON_S1_3	(0xb << 0)
+#define PP_SEQUENCE_STATE_RESET		(0xf << 0)
 #define PP_CONTROL			0x61204
 #define POWER_TARGET_ON			(1 << 0)
 #define PP_ON_DELAYS			0x61208
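
The hunk above replaces the old single-bit PP_SEQUENCE_STATE_ON_IDLE flag with the full 4-bit sequencer state field and widens PP_SEQUENCE_MASK to cover both sequence bits. As a rough standalone illustration of how those fields pack into a PCH_PP_STATUS value (not part of the patch: decode_pp_status() and the stdio output are hypothetical, and PP_ON is defined elsewhere in i915_reg.h):

/* Illustrative only -- decodes the sequencer fields of a raw
 * PCH_PP_STATUS value using the defines added above. */
#include <stdint.h>
#include <stdio.h>

#define PP_ON				(1u << 31)	/* defined earlier in i915_reg.h */
#define PP_SEQUENCE_NONE		(0u << 28)
#define PP_SEQUENCE_MASK		(3u << 28)
#define PP_SEQUENCE_SHIFT		28
#define PP_CYCLE_DELAY_ACTIVE		(1u << 27)
#define PP_SEQUENCE_STATE_MASK		0x0000000f
#define PP_SEQUENCE_STATE_ON_IDLE	(0x8 << 0)

static void decode_pp_status(uint32_t status)	/* hypothetical helper */
{
	printf("panel %s, sequence %u, cycle delay %s, state 0x%x\n",
	       (status & PP_ON) ? "on" : "off",
	       (status & PP_SEQUENCE_MASK) >> PP_SEQUENCE_SHIFT,
	       (status & PP_CYCLE_DELAY_ACTIVE) ? "active" : "idle",
	       status & PP_SEQUENCE_STATE_MASK);
}

int main(void)
{
	/* e.g. panel on, no sequence in progress, sequencer in ON_IDLE */
	decode_pp_status(PP_ON | PP_SEQUENCE_NONE | PP_SEQUENCE_STATE_ON_IDLE);
	return 0;
}

The intel_dp.c changes below consume exactly these fields through the IDLE_*_MASK / IDLE_*_VALUE pairs.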
diff --git a/drivers/gpu/drm/i915/intel_dp.c b/drivers/gpu/drm/i915/intel_dp.c
index d1eabd4165c4..56106101d3f8 100644
--- a/drivers/gpu/drm/i915/intel_dp.c
+++ b/drivers/gpu/drm/i915/intel_dp.c
@@ -66,7 +66,6 @@ struct intel_dp {
 	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
 	struct delayed_work panel_vdd_work;
 	bool want_panel_vdd;
-	unsigned long panel_off_jiffies;
 };
 
 /**
@@ -906,32 +905,53 @@ intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
 	}
 }
 
-static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
-{
-	unsigned long off_time;
-	unsigned long delay;
-
-	DRM_DEBUG_KMS("Wait for panel power off time\n");
-
-	if (ironlake_edp_have_panel_power(intel_dp) ||
-	    ironlake_edp_have_panel_vdd(intel_dp))
-	{
-		DRM_DEBUG_KMS("Panel still on, no delay needed\n");
-		return;
-	}
-
-	off_time = intel_dp->panel_off_jiffies + msecs_to_jiffies(intel_dp->panel_power_down_delay);
-	if (time_after(jiffies, off_time)) {
-		DRM_DEBUG_KMS("Time already passed");
-		return;
-	}
-	delay = jiffies_to_msecs(off_time - jiffies);
-	if (delay > intel_dp->panel_power_down_delay)
-		delay = intel_dp->panel_power_down_delay;
-	DRM_DEBUG_KMS("Waiting an additional %ld ms\n", delay);
-	msleep(delay);
+#define IDLE_ON_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
+#define IDLE_ON_VALUE		(PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)
+
+#define IDLE_OFF_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
+#define IDLE_OFF_VALUE		(0     | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
+
+#define IDLE_CYCLE_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
+#define IDLE_CYCLE_VALUE	(0     | 0 | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)
+
+static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
+				       u32 mask,
+				       u32 value)
+{
+	struct drm_device *dev = intel_dp->base.base.dev;
+	struct drm_i915_private *dev_priv = dev->dev_private;
+
+	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
+		      mask, value,
+		      I915_READ(PCH_PP_STATUS),
+		      I915_READ(PCH_PP_CONTROL));
+
+	if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) {
+		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
+			  I915_READ(PCH_PP_STATUS),
+			  I915_READ(PCH_PP_CONTROL));
+	}
+}
+
+static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
+{
+	DRM_DEBUG_KMS("Wait for panel power on\n");
+	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
+}
+
+static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
+{
+	DRM_DEBUG_KMS("Wait for panel power off time\n");
+	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
+}
+
+static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
+{
+	DRM_DEBUG_KMS("Wait for panel power cycle\n");
+	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
 }
 
+
 /* Read the current pp_control value, unlocking the register if it
  * is locked
  */
@@ -959,12 +979,15 @@ static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
959 "eDP VDD already requested on\n"); 979 "eDP VDD already requested on\n");
960 980
961 intel_dp->want_panel_vdd = true; 981 intel_dp->want_panel_vdd = true;
982
962 if (ironlake_edp_have_panel_vdd(intel_dp)) { 983 if (ironlake_edp_have_panel_vdd(intel_dp)) {
963 DRM_DEBUG_KMS("eDP VDD already on\n"); 984 DRM_DEBUG_KMS("eDP VDD already on\n");
964 return; 985 return;
965 } 986 }
966 987
967 ironlake_wait_panel_off(intel_dp); 988 if (!ironlake_edp_have_panel_power(intel_dp))
989 ironlake_wait_panel_power_cycle(intel_dp);
990
968 pp = ironlake_get_pp_control(dev_priv); 991 pp = ironlake_get_pp_control(dev_priv);
969 pp |= EDP_FORCE_VDD; 992 pp |= EDP_FORCE_VDD;
970 I915_WRITE(PCH_PP_CONTROL, pp); 993 I915_WRITE(PCH_PP_CONTROL, pp);
@@ -996,7 +1019,8 @@ static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
 		/* Make sure sequencer is idle before allowing subsequent activity */
 		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
 			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));
-		intel_dp->panel_off_jiffies = jiffies;
+
+		msleep(intel_dp->panel_power_down_delay);
 	}
 }
 
@@ -1034,21 +1058,25 @@ static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
 	}
 }
 
-/* Returns true if the panel was already on when called */
 static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 {
 	struct drm_device *dev = intel_dp->base.base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_STATE_ON_IDLE;
+	u32 pp;
 
 	if (!is_edp(intel_dp))
 		return;
-	if (ironlake_edp_have_panel_power(intel_dp))
+
+	DRM_DEBUG_KMS("Turn eDP power on\n");
+
+	if (ironlake_edp_have_panel_power(intel_dp)) {
+		DRM_DEBUG_KMS("eDP power already on\n");
 		return;
+	}
 
-	ironlake_wait_panel_off(intel_dp);
-	pp = ironlake_get_pp_control(dev_priv);
+	ironlake_wait_panel_power_cycle(intel_dp);
 
+	pp = ironlake_get_pp_control(dev_priv);
 	if (IS_GEN5(dev)) {
 		/* ILK workaround: disable reset around power sequence */
 		pp &= ~PANEL_POWER_RESET;
@@ -1057,13 +1085,13 @@ static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 	}
 
 	pp |= POWER_TARGET_ON;
+	if (!IS_GEN5(dev))
+		pp |= PANEL_POWER_RESET;
+
 	I915_WRITE(PCH_PP_CONTROL, pp);
 	POSTING_READ(PCH_PP_CONTROL);
 
-	if (wait_for((I915_READ(PCH_PP_STATUS) & idle_on_mask) == idle_on_mask,
-		     5000))
-		DRM_ERROR("panel on wait timed out: 0x%08x\n",
-			  I915_READ(PCH_PP_STATUS));
+	ironlake_wait_panel_on(intel_dp);
 
 	if (IS_GEN5(dev)) {
 		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
@@ -1072,44 +1100,25 @@ static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
 	}
 }
 
-static void ironlake_edp_panel_off(struct drm_encoder *encoder)
+static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
 {
-	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
-	struct drm_device *dev = encoder->dev;
+	struct drm_device *dev = intel_dp->base.base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	u32 pp, idle_off_mask = PP_ON | PP_SEQUENCE_MASK |
-		PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK;
+	u32 pp;
 
 	if (!is_edp(intel_dp))
 		return;
-	pp = ironlake_get_pp_control(dev_priv);
 
-	if (IS_GEN5(dev)) {
-		/* ILK workaround: disable reset around power sequence */
-		pp &= ~PANEL_POWER_RESET;
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-	}
-
-	intel_dp->panel_off_jiffies = jiffies;
+	DRM_DEBUG_KMS("Turn eDP power off\n");
 
-	if (IS_GEN5(dev)) {
-		pp &= ~POWER_TARGET_ON;
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-		pp &= ~POWER_TARGET_ON;
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-		msleep(intel_dp->panel_power_cycle_delay);
+	WARN(intel_dp->want_panel_vdd, "Cannot turn power off while VDD is on\n");
 
-		if (wait_for((I915_READ(PCH_PP_STATUS) & idle_off_mask) == 0, 5000))
-			DRM_ERROR("panel off wait timed out: 0x%08x\n",
-				  I915_READ(PCH_PP_STATUS));
+	pp = ironlake_get_pp_control(dev_priv);
+	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
+	I915_WRITE(PCH_PP_CONTROL, pp);
+	POSTING_READ(PCH_PP_CONTROL);
 
-		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
-		I915_WRITE(PCH_PP_CONTROL, pp);
-		POSTING_READ(PCH_PP_CONTROL);
-	}
+	ironlake_wait_panel_off(intel_dp);
 }
 
 static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
@@ -1223,7 +1232,7 @@ static void intel_dp_prepare(struct drm_encoder *encoder)
 	 */
 	ironlake_edp_backlight_off(intel_dp);
 	intel_dp_link_down(intel_dp);
-	ironlake_edp_panel_off(encoder);
+	ironlake_edp_panel_off(intel_dp);
 }
 
 static void intel_dp_commit(struct drm_encoder *encoder)
@@ -1237,7 +1246,6 @@ static void intel_dp_commit(struct drm_encoder *encoder)
 	intel_dp_start_link_train(intel_dp);
 	ironlake_edp_panel_on(intel_dp);
 	ironlake_edp_panel_vdd_off(intel_dp, true);
-
 	intel_dp_complete_link_train(intel_dp);
 	ironlake_edp_backlight_on(intel_dp);
 
@@ -1261,7 +1269,7 @@ intel_dp_dpms(struct drm_encoder *encoder, int mode)
 		ironlake_edp_backlight_off(intel_dp);
 		intel_dp_sink_dpms(intel_dp, mode);
 		intel_dp_link_down(intel_dp);
-		ironlake_edp_panel_off(encoder);
+		ironlake_edp_panel_off(intel_dp);
 		if (is_edp(intel_dp) && !is_pch_edp(intel_dp))
 			ironlake_edp_pll_off(encoder);
 		ironlake_edp_panel_vdd_off(intel_dp, false);
@@ -2398,11 +2406,10 @@ intel_dp_init(struct drm_device *dev, int output_reg)
 	DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
 		      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
 
-	intel_dp->panel_off_jiffies = jiffies - intel_dp->panel_power_down_delay;
-
 	ironlake_edp_panel_vdd_on(intel_dp);
 	ret = intel_dp_get_dpcd(intel_dp);
 	ironlake_edp_panel_vdd_off(intel_dp, false);
+
 	if (ret) {
 		if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
 			dev_priv->no_aux_handshake =
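
Taken together, the intel_dp.c changes replace the jiffies bookkeeping (panel_off_jiffies plus open-coded msleep() arithmetic) with a single helper, ironlake_wait_panel_status(), that polls the hardware sequencer through i915's _wait_for(COND, MS, W) macro: re-read PCH_PP_STATUS every 10 ms until the masked bits equal the expected value, giving up after 5000 ms. A minimal user-space sketch of that polling shape (illustrative only: read_pp_status(), sleep_ms() and wait_panel_status() are made-up stand-ins, not kernel code):

/* Illustrative only -- a user-space approximation of the polling that
 * ironlake_wait_panel_status() delegates to _wait_for(cond, 5000, 10). */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <time.h>

/* Stand-in for I915_READ(PCH_PP_STATUS); pretends the panel reaches
 * the ON_IDLE state after a few reads. */
static uint32_t read_pp_status(void)
{
	static int reads;
	return (++reads < 20) ? 0x10000001u : 0x80000008u;
}

static void sleep_ms(unsigned int ms)
{
	struct timespec ts = { ms / 1000, (long)(ms % 1000) * 1000000L };
	nanosleep(&ts, NULL);
}

/* Poll until (status & mask) == value; true on success, false on timeout. */
static bool wait_panel_status(uint32_t mask, uint32_t value,
			      unsigned int timeout_ms, unsigned int poll_ms)
{
	unsigned int waited = 0;

	while ((read_pp_status() & mask) != value) {
		if (waited >= timeout_ms)
			return false;
		sleep_ms(poll_ms);
		waited += poll_ms;
	}
	return true;
}

int main(void)
{
	/* Mirrors IDLE_ON_MASK/IDLE_ON_VALUE from the patch: PP_ON set,
	 * no sequence in progress, sequencer state ON_IDLE (0x8). */
	uint32_t mask  = 0x80000000u | 0x30000000u | 0x0000000fu;
	uint32_t value = 0x80000000u | 0x00000000u | 0x00000008u;

	if (!wait_panel_status(mask, value, 5000, 10))
		fprintf(stderr, "panel status timeout\n");
	else
		printf("panel reached ON_IDLE\n");
	return 0;
}

The upside over the removed jiffies arithmetic is that the wait tracks what the panel sequencer actually reports, so the driver neither returns early while a power cycle is still in flight nor sleeps for the full worst-case delay when the panel is already idle.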