Diffstat (limited to 'drivers/gpu/drm/i915/intel_pm.c')
-rw-r--r--	drivers/gpu/drm/i915/intel_pm.c	188
1 file changed, 163 insertions(+), 25 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_pm.c b/drivers/gpu/drm/i915/intel_pm.c
index 496caa73eb70..3280cffe50f4 100644
--- a/drivers/gpu/drm/i915/intel_pm.c
+++ b/drivers/gpu/drm/i915/intel_pm.c
@@ -44,6 +44,14 @@
  * i915.i915_enable_fbc parameter
  */
 
+static bool intel_crtc_active(struct drm_crtc *crtc)
+{
+	/* Be paranoid as we can arrive here with only partial
+	 * state retrieved from the hardware during setup.
+	 */
+	return to_intel_crtc(crtc)->active && crtc->fb && crtc->mode.clock;
+}
+
 static void i8xx_disable_fbc(struct drm_device *dev)
 {
 	struct drm_i915_private *dev_priv = dev->dev_private;
@@ -405,9 +413,8 @@ void intel_update_fbc(struct drm_device *dev)
 	 * - going to an unsupported config (interlace, pixel multiply, etc.)
 	 */
 	list_for_each_entry(tmp_crtc, &dev->mode_config.crtc_list, head) {
-		if (tmp_crtc->enabled &&
-		    !to_intel_crtc(tmp_crtc)->primary_disabled &&
-		    tmp_crtc->fb) {
+		if (intel_crtc_active(tmp_crtc) &&
+		    !to_intel_crtc(tmp_crtc)->primary_disabled) {
 			if (crtc) {
 				DRM_DEBUG_KMS("more than one pipe active, disabling compression\n");
 				dev_priv->no_fbc_reason = FBC_MULTIPLE_PIPES;
@@ -992,7 +999,7 @@ static struct drm_crtc *single_enabled_crtc(struct drm_device *dev)
 	struct drm_crtc *crtc, *enabled = NULL;
 
 	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
-		if (crtc->enabled && crtc->fb) {
+		if (intel_crtc_active(crtc)) {
 			if (enabled)
 				return NULL;
 			enabled = crtc;
@@ -1086,7 +1093,7 @@ static bool g4x_compute_wm0(struct drm_device *dev,
 	int entries, tlb_miss;
 
 	crtc = intel_get_crtc_for_plane(dev, plane);
-	if (crtc->fb == NULL || !crtc->enabled) {
+	if (!intel_crtc_active(crtc)) {
 		*cursor_wm = cursor->guard_size;
 		*plane_wm = display->guard_size;
 		return false;
@@ -1215,7 +1222,7 @@ static bool vlv_compute_drain_latency(struct drm_device *dev,
 	int entries;
 
 	crtc = intel_get_crtc_for_plane(dev, plane);
-	if (crtc->fb == NULL || !crtc->enabled)
+	if (!intel_crtc_active(crtc))
 		return false;
 
 	clock = crtc->mode.clock;	/* VESA DOT Clock */
@@ -1286,6 +1293,7 @@ static void valleyview_update_wm(struct drm_device *dev)
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	int planea_wm, planeb_wm, cursora_wm, cursorb_wm;
 	int plane_sr, cursor_sr;
+	int ignore_plane_sr, ignore_cursor_sr;
 	unsigned int enabled = 0;
 
 	vlv_update_drain_latency(dev);
@@ -1302,17 +1310,23 @@ static void valleyview_update_wm(struct drm_device *dev)
 			    &planeb_wm, &cursorb_wm))
 		enabled |= 2;
 
-	plane_sr = cursor_sr = 0;
 	if (single_plane_enabled(enabled) &&
 	    g4x_compute_srwm(dev, ffs(enabled) - 1,
 			     sr_latency_ns,
 			     &valleyview_wm_info,
 			     &valleyview_cursor_wm_info,
-			     &plane_sr, &cursor_sr))
+			     &plane_sr, &ignore_cursor_sr) &&
+	    g4x_compute_srwm(dev, ffs(enabled) - 1,
+			     2*sr_latency_ns,
+			     &valleyview_wm_info,
+			     &valleyview_cursor_wm_info,
+			     &ignore_plane_sr, &cursor_sr)) {
 		I915_WRITE(FW_BLC_SELF_VLV, FW_CSPWRDWNEN);
-	else
+	} else {
 		I915_WRITE(FW_BLC_SELF_VLV,
 			   I915_READ(FW_BLC_SELF_VLV) & ~FW_CSPWRDWNEN);
+		plane_sr = cursor_sr = 0;
+	}
 
 	DRM_DEBUG_KMS("Setting FIFO watermarks - A: plane=%d, cursor=%d, B: plane=%d, cursor=%d, SR: plane=%d, cursor=%d\n",
 		      planea_wm, cursora_wm,
@@ -1352,17 +1366,18 @@ static void g4x_update_wm(struct drm_device *dev)
 			    &planeb_wm, &cursorb_wm))
 		enabled |= 2;
 
-	plane_sr = cursor_sr = 0;
 	if (single_plane_enabled(enabled) &&
 	    g4x_compute_srwm(dev, ffs(enabled) - 1,
 			     sr_latency_ns,
 			     &g4x_wm_info,
 			     &g4x_cursor_wm_info,
-			     &plane_sr, &cursor_sr))
+			     &plane_sr, &cursor_sr)) {
 		I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN);
-	else
+	} else {
 		I915_WRITE(FW_BLC_SELF,
 			   I915_READ(FW_BLC_SELF) & ~FW_BLC_SELF_EN);
+		plane_sr = cursor_sr = 0;
+	}
 
 	DRM_DEBUG_KMS("Setting FIFO watermarks - A: plane=%d, cursor=%d, B: plane=%d, cursor=%d, SR: plane=%d, cursor=%d\n",
 		      planea_wm, cursora_wm,
@@ -1468,7 +1483,7 @@ static void i9xx_update_wm(struct drm_device *dev)
 
 	fifo_size = dev_priv->display.get_fifo_size(dev, 0);
 	crtc = intel_get_crtc_for_plane(dev, 0);
-	if (crtc->enabled && crtc->fb) {
+	if (intel_crtc_active(crtc)) {
 		int cpp = crtc->fb->bits_per_pixel / 8;
 		if (IS_GEN2(dev))
 			cpp = 4;
@@ -1482,7 +1497,7 @@ static void i9xx_update_wm(struct drm_device *dev)
 
 	fifo_size = dev_priv->display.get_fifo_size(dev, 1);
 	crtc = intel_get_crtc_for_plane(dev, 1);
-	if (crtc->enabled && crtc->fb) {
+	if (intel_crtc_active(crtc)) {
 		int cpp = crtc->fb->bits_per_pixel / 8;
 		if (IS_GEN2(dev))
 			cpp = 4;
@@ -1811,8 +1826,110 @@ static void sandybridge_update_wm(struct drm_device *dev)
 		enabled |= 2;
 	}
 
-	if ((dev_priv->num_pipe == 3) &&
-	    g4x_compute_wm0(dev, 2,
+	/*
+	 * Calculate and update the self-refresh watermark only when one
+	 * display plane is used.
+	 *
+	 * SNB support 3 levels of watermark.
+	 *
+	 * WM1/WM2/WM3 watermarks have to be enabled in the ascending order,
+	 * and disabled in the descending order
+	 *
+	 */
+	I915_WRITE(WM3_LP_ILK, 0);
+	I915_WRITE(WM2_LP_ILK, 0);
+	I915_WRITE(WM1_LP_ILK, 0);
+
+	if (!single_plane_enabled(enabled) ||
+	    dev_priv->sprite_scaling_enabled)
+		return;
+	enabled = ffs(enabled) - 1;
+
+	/* WM1 */
+	if (!ironlake_compute_srwm(dev, 1, enabled,
+				   SNB_READ_WM1_LATENCY() * 500,
+				   &sandybridge_display_srwm_info,
+				   &sandybridge_cursor_srwm_info,
+				   &fbc_wm, &plane_wm, &cursor_wm))
+		return;
+
+	I915_WRITE(WM1_LP_ILK,
+		   WM1_LP_SR_EN |
+		   (SNB_READ_WM1_LATENCY() << WM1_LP_LATENCY_SHIFT) |
+		   (fbc_wm << WM1_LP_FBC_SHIFT) |
+		   (plane_wm << WM1_LP_SR_SHIFT) |
+		   cursor_wm);
+
+	/* WM2 */
+	if (!ironlake_compute_srwm(dev, 2, enabled,
+				   SNB_READ_WM2_LATENCY() * 500,
+				   &sandybridge_display_srwm_info,
+				   &sandybridge_cursor_srwm_info,
+				   &fbc_wm, &plane_wm, &cursor_wm))
+		return;
+
+	I915_WRITE(WM2_LP_ILK,
+		   WM2_LP_EN |
+		   (SNB_READ_WM2_LATENCY() << WM1_LP_LATENCY_SHIFT) |
+		   (fbc_wm << WM1_LP_FBC_SHIFT) |
+		   (plane_wm << WM1_LP_SR_SHIFT) |
+		   cursor_wm);
+
+	/* WM3 */
+	if (!ironlake_compute_srwm(dev, 3, enabled,
+				   SNB_READ_WM3_LATENCY() * 500,
+				   &sandybridge_display_srwm_info,
+				   &sandybridge_cursor_srwm_info,
+				   &fbc_wm, &plane_wm, &cursor_wm))
+		return;
+
+	I915_WRITE(WM3_LP_ILK,
+		   WM3_LP_EN |
+		   (SNB_READ_WM3_LATENCY() << WM1_LP_LATENCY_SHIFT) |
+		   (fbc_wm << WM1_LP_FBC_SHIFT) |
+		   (plane_wm << WM1_LP_SR_SHIFT) |
+		   cursor_wm);
+}
+
+static void ivybridge_update_wm(struct drm_device *dev)
+{
+	struct drm_i915_private *dev_priv = dev->dev_private;
+	int latency = SNB_READ_WM0_LATENCY() * 100;	/* In unit 0.1us */
+	u32 val;
+	int fbc_wm, plane_wm, cursor_wm;
+	int ignore_fbc_wm, ignore_plane_wm, ignore_cursor_wm;
+	unsigned int enabled;
+
+	enabled = 0;
+	if (g4x_compute_wm0(dev, 0,
+			    &sandybridge_display_wm_info, latency,
+			    &sandybridge_cursor_wm_info, latency,
+			    &plane_wm, &cursor_wm)) {
+		val = I915_READ(WM0_PIPEA_ILK);
+		val &= ~(WM0_PIPE_PLANE_MASK | WM0_PIPE_CURSOR_MASK);
+		I915_WRITE(WM0_PIPEA_ILK, val |
+			   ((plane_wm << WM0_PIPE_PLANE_SHIFT) | cursor_wm));
+		DRM_DEBUG_KMS("FIFO watermarks For pipe A -"
+			      " plane %d, " "cursor: %d\n",
+			      plane_wm, cursor_wm);
+		enabled |= 1;
+	}
+
+	if (g4x_compute_wm0(dev, 1,
+			    &sandybridge_display_wm_info, latency,
+			    &sandybridge_cursor_wm_info, latency,
+			    &plane_wm, &cursor_wm)) {
+		val = I915_READ(WM0_PIPEB_ILK);
+		val &= ~(WM0_PIPE_PLANE_MASK | WM0_PIPE_CURSOR_MASK);
+		I915_WRITE(WM0_PIPEB_ILK, val |
+			   ((plane_wm << WM0_PIPE_PLANE_SHIFT) | cursor_wm));
+		DRM_DEBUG_KMS("FIFO watermarks For pipe B -"
+			      " plane %d, cursor: %d\n",
+			      plane_wm, cursor_wm);
+		enabled |= 2;
+	}
+
+	if (g4x_compute_wm0(dev, 2,
 			    &sandybridge_display_wm_info, latency,
 			    &sandybridge_cursor_wm_info, latency,
 			    &plane_wm, &cursor_wm)) {
@@ -1875,12 +1992,17 @@ static void sandybridge_update_wm(struct drm_device *dev)
 		   (plane_wm << WM1_LP_SR_SHIFT) |
 		   cursor_wm);
 
-	/* WM3 */
+	/* WM3, note we have to correct the cursor latency */
 	if (!ironlake_compute_srwm(dev, 3, enabled,
 				   SNB_READ_WM3_LATENCY() * 500,
 				   &sandybridge_display_srwm_info,
 				   &sandybridge_cursor_srwm_info,
-				   &fbc_wm, &plane_wm, &cursor_wm))
+				   &fbc_wm, &plane_wm, &ignore_cursor_wm) ||
+	    !ironlake_compute_srwm(dev, 3, enabled,
+				   2 * SNB_READ_WM3_LATENCY() * 500,
+				   &sandybridge_display_srwm_info,
+				   &sandybridge_cursor_srwm_info,
+				   &ignore_fbc_wm, &ignore_plane_wm, &cursor_wm))
 		return;
 
 	I915_WRITE(WM3_LP_ILK,
@@ -1929,7 +2051,7 @@ sandybridge_compute_sprite_wm(struct drm_device *dev, int plane,
 	int entries, tlb_miss;
 
 	crtc = intel_get_crtc_for_plane(dev, plane);
-	if (crtc->fb == NULL || !crtc->enabled) {
+	if (!intel_crtc_active(crtc)) {
 		*sprite_wm = display->guard_size;
 		return false;
 	}
@@ -3471,6 +3593,15 @@ static void gen6_init_clock_gating(struct drm_device *dev)
 		   I915_READ(ILK_DISPLAY_CHICKEN2) |
 		   ILK_ELPIN_409_SELECT);
 
+	/* WaDisableHiZPlanesWhenMSAAEnabled */
+	I915_WRITE(_3D_CHICKEN,
+		   _MASKED_BIT_ENABLE(_3D_CHICKEN_HIZ_PLANE_DISABLE_MSAA_4X_SNB));
+
+	/* WaSetupGtModeTdRowDispatch */
+	if (IS_SNB_GT1(dev))
+		I915_WRITE(GEN6_GT_MODE,
+			   _MASKED_BIT_ENABLE(GEN6_TD_FOUR_ROW_DISPATCH_DISABLE));
+
 	I915_WRITE(WM3_LP_ILK, 0);
 	I915_WRITE(WM2_LP_ILK, 0);
 	I915_WRITE(WM1_LP_ILK, 0);
@@ -3999,7 +4130,7 @@ void intel_init_pm(struct drm_device *dev)
 	} else if (IS_IVYBRIDGE(dev)) {
 		/* FIXME: detect B0+ stepping and use auto training */
 		if (SNB_READ_WM0_LATENCY()) {
-			dev_priv->display.update_wm = sandybridge_update_wm;
+			dev_priv->display.update_wm = ivybridge_update_wm;
 			dev_priv->display.update_sprite_wm = sandybridge_update_sprite_wm;
 		} else {
 			DRM_DEBUG_KMS("Failed to read display plane latency. "
@@ -4119,7 +4250,8 @@ static void __gen6_gt_force_wake_get(struct drm_i915_private *dev_priv)
 static void __gen6_gt_force_wake_mt_reset(struct drm_i915_private *dev_priv)
 {
 	I915_WRITE_NOTRACE(FORCEWAKE_MT, _MASKED_BIT_DISABLE(0xffff));
-	POSTING_READ(ECOBUS); /* something from same cacheline, but !FORCEWAKE */
+	/* something from same cacheline, but !FORCEWAKE_MT */
+	POSTING_READ(ECOBUS);
 }
 
 static void __gen6_gt_force_wake_mt_get(struct drm_i915_private *dev_priv)
@@ -4136,7 +4268,8 @@ static void __gen6_gt_force_wake_mt_get(struct drm_i915_private *dev_priv)
 		DRM_ERROR("Timed out waiting for forcewake old ack to clear.\n");
 
 	I915_WRITE_NOTRACE(FORCEWAKE_MT, _MASKED_BIT_ENABLE(FORCEWAKE_KERNEL));
-	POSTING_READ(ECOBUS); /* something from same cacheline, but !FORCEWAKE */
+	/* something from same cacheline, but !FORCEWAKE_MT */
+	POSTING_READ(ECOBUS);
 
 	if (wait_for_atomic((I915_READ_NOTRACE(forcewake_ack) & 1),
 			    FORCEWAKE_ACK_TIMEOUT_MS))
@@ -4173,14 +4306,16 @@ void gen6_gt_check_fifodbg(struct drm_i915_private *dev_priv)
 static void __gen6_gt_force_wake_put(struct drm_i915_private *dev_priv)
 {
 	I915_WRITE_NOTRACE(FORCEWAKE, 0);
-	/* gen6_gt_check_fifodbg doubles as the POSTING_READ */
+	/* something from same cacheline, but !FORCEWAKE */
+	POSTING_READ(ECOBUS);
 	gen6_gt_check_fifodbg(dev_priv);
 }
 
 static void __gen6_gt_force_wake_mt_put(struct drm_i915_private *dev_priv)
 {
 	I915_WRITE_NOTRACE(FORCEWAKE_MT, _MASKED_BIT_DISABLE(FORCEWAKE_KERNEL));
-	/* gen6_gt_check_fifodbg doubles as the POSTING_READ */
+	/* something from same cacheline, but !FORCEWAKE_MT */
+	POSTING_READ(ECOBUS);
 	gen6_gt_check_fifodbg(dev_priv);
 }
 
@@ -4220,6 +4355,8 @@ int __gen6_gt_wait_for_fifo(struct drm_i915_private *dev_priv)
 static void vlv_force_wake_reset(struct drm_i915_private *dev_priv)
 {
 	I915_WRITE_NOTRACE(FORCEWAKE_VLV, _MASKED_BIT_DISABLE(0xffff));
+	/* something from same cacheline, but !FORCEWAKE_VLV */
+	POSTING_READ(FORCEWAKE_ACK_VLV);
 }
 
 static void vlv_force_wake_get(struct drm_i915_private *dev_priv)
@@ -4240,7 +4377,8 @@ static void vlv_force_wake_get(struct drm_i915_private *dev_priv)
 static void vlv_force_wake_put(struct drm_i915_private *dev_priv)
 {
 	I915_WRITE_NOTRACE(FORCEWAKE_VLV, _MASKED_BIT_DISABLE(FORCEWAKE_KERNEL));
-	/* The below doubles as a POSTING_READ */
+	/* something from same cacheline, but !FORCEWAKE_VLV */
+	POSTING_READ(FORCEWAKE_ACK_VLV);
 	gen6_gt_check_fifodbg(dev_priv);
 }
 