author	Chris Wilson <chris@chris-wilson.co.uk>	2010-11-08 14:18:58 -0500
committer	Chris Wilson <chris@chris-wilson.co.uk>	2010-11-23 15:19:10 -0500
commit	05394f3975dceb107a5e1393e2244946e5b43660 (patch)
tree	2af73b6efec503ed4cd9c932018619bd28a1fe60 /drivers/gpu/drm/i915/intel_display.c
parent	185cbcb304ba4dee55e39593fd86dcd7813f62ec (diff)
drm/i915: Use drm_i915_gem_object as the preferred type
A glorified s/obj_priv/obj/ with a net reduction of over 100 lines and many characters!

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
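The pattern repeated throughout the file is mechanical: call sites that used to hold a struct drm_gem_object and recover the driver-private object with to_intel_bo() now carry the struct drm_i915_gem_object pointer directly, and reach the embedded GEM object through its base member only where the core DRM API is needed. A condensed before/after sketch of the idiom, distilled from the hunks below (illustrative only; it assumes intel_fb->obj has already been switched to the new type elsewhere in the series):

	/* Before: recover the driver type from the GEM object at each use. */
	struct drm_gem_object *gem = intel_fb->obj;
	struct drm_i915_gem_object *obj_priv = to_intel_bo(gem);
	if (obj_priv->tiling_mode != I915_TILING_NONE)
		dspcntr |= DISPPLANE_TILED;
	drm_gem_object_unreference(gem);

	/* After: pass the driver type around; use &obj->base for core DRM calls. */
	struct drm_i915_gem_object *obj = intel_fb->obj;
	if (obj->tiling_mode != I915_TILING_NONE)
		dspcntr |= DISPPLANE_TILED;
	drm_gem_object_unreference(&obj->base);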
Diffstat (limited to 'drivers/gpu/drm/i915/intel_display.c')
-rw-r--r--	drivers/gpu/drm/i915/intel_display.c	242
1 file changed, 109 insertions(+), 133 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_display.c b/drivers/gpu/drm/i915/intel_display.c
index d4bc443f43fc..ae7d4f55ce07 100644
--- a/drivers/gpu/drm/i915/intel_display.c
+++ b/drivers/gpu/drm/i915/intel_display.c
@@ -1066,13 +1066,13 @@ static void i8xx_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct drm_framebuffer *fb = crtc->fb;
 	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
-	struct drm_i915_gem_object *obj_priv = to_intel_bo(intel_fb->obj);
+	struct drm_i915_gem_object *obj = intel_fb->obj;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 	int plane, i;
 	u32 fbc_ctl, fbc_ctl2;
 
 	if (fb->pitch == dev_priv->cfb_pitch &&
-	    obj_priv->fence_reg == dev_priv->cfb_fence &&
+	    obj->fence_reg == dev_priv->cfb_fence &&
 	    intel_crtc->plane == dev_priv->cfb_plane &&
 	    I915_READ(FBC_CONTROL) & FBC_CTL_EN)
 		return;
@@ -1086,7 +1086,7 @@ static void i8xx_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 
 	/* FBC_CTL wants 64B units */
 	dev_priv->cfb_pitch = (dev_priv->cfb_pitch / 64) - 1;
-	dev_priv->cfb_fence = obj_priv->fence_reg;
+	dev_priv->cfb_fence = obj->fence_reg;
 	dev_priv->cfb_plane = intel_crtc->plane;
 	plane = dev_priv->cfb_plane == 0 ? FBC_CTL_PLANEA : FBC_CTL_PLANEB;
 
@@ -1096,7 +1096,7 @@ static void i8xx_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 
 	/* Set it up... */
 	fbc_ctl2 = FBC_CTL_FENCE_DBL | FBC_CTL_IDLE_IMM | plane;
-	if (obj_priv->tiling_mode != I915_TILING_NONE)
+	if (obj->tiling_mode != I915_TILING_NONE)
 		fbc_ctl2 |= FBC_CTL_CPU_FENCE;
 	I915_WRITE(FBC_CONTROL2, fbc_ctl2);
 	I915_WRITE(FBC_FENCE_OFF, crtc->y);
@@ -1107,7 +1107,7 @@ static void i8xx_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 		fbc_ctl |= FBC_CTL_C3_IDLE; /* 945 needs special SR handling */
 	fbc_ctl |= (dev_priv->cfb_pitch & 0xff) << FBC_CTL_STRIDE_SHIFT;
 	fbc_ctl |= (interval & 0x2fff) << FBC_CTL_INTERVAL_SHIFT;
-	if (obj_priv->tiling_mode != I915_TILING_NONE)
+	if (obj->tiling_mode != I915_TILING_NONE)
 		fbc_ctl |= dev_priv->cfb_fence;
 	I915_WRITE(FBC_CONTROL, fbc_ctl);
 
@@ -1150,7 +1150,7 @@ static void g4x_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct drm_framebuffer *fb = crtc->fb;
 	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
-	struct drm_i915_gem_object *obj_priv = to_intel_bo(intel_fb->obj);
+	struct drm_i915_gem_object *obj = intel_fb->obj;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 	int plane = intel_crtc->plane == 0 ? DPFC_CTL_PLANEA : DPFC_CTL_PLANEB;
 	unsigned long stall_watermark = 200;
@@ -1159,7 +1159,7 @@ static void g4x_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 	dpfc_ctl = I915_READ(DPFC_CONTROL);
 	if (dpfc_ctl & DPFC_CTL_EN) {
 		if (dev_priv->cfb_pitch == dev_priv->cfb_pitch / 64 - 1 &&
-		    dev_priv->cfb_fence == obj_priv->fence_reg &&
+		    dev_priv->cfb_fence == obj->fence_reg &&
 		    dev_priv->cfb_plane == intel_crtc->plane &&
 		    dev_priv->cfb_y == crtc->y)
 			return;
@@ -1170,12 +1170,12 @@ static void g4x_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 	}
 
 	dev_priv->cfb_pitch = (dev_priv->cfb_pitch / 64) - 1;
-	dev_priv->cfb_fence = obj_priv->fence_reg;
+	dev_priv->cfb_fence = obj->fence_reg;
 	dev_priv->cfb_plane = intel_crtc->plane;
 	dev_priv->cfb_y = crtc->y;
 
 	dpfc_ctl = plane | DPFC_SR_EN | DPFC_CTL_LIMIT_1X;
-	if (obj_priv->tiling_mode != I915_TILING_NONE) {
+	if (obj->tiling_mode != I915_TILING_NONE) {
 		dpfc_ctl |= DPFC_CTL_FENCE_EN | dev_priv->cfb_fence;
 		I915_WRITE(DPFC_CHICKEN, DPFC_HT_MODIFY);
 	} else {
@@ -1221,7 +1221,7 @@ static void ironlake_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct drm_framebuffer *fb = crtc->fb;
 	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
-	struct drm_i915_gem_object *obj_priv = to_intel_bo(intel_fb->obj);
+	struct drm_i915_gem_object *obj = intel_fb->obj;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 	int plane = intel_crtc->plane == 0 ? DPFC_CTL_PLANEA : DPFC_CTL_PLANEB;
 	unsigned long stall_watermark = 200;
@@ -1230,9 +1230,9 @@ static void ironlake_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 	dpfc_ctl = I915_READ(ILK_DPFC_CONTROL);
 	if (dpfc_ctl & DPFC_CTL_EN) {
 		if (dev_priv->cfb_pitch == dev_priv->cfb_pitch / 64 - 1 &&
-		    dev_priv->cfb_fence == obj_priv->fence_reg &&
+		    dev_priv->cfb_fence == obj->fence_reg &&
 		    dev_priv->cfb_plane == intel_crtc->plane &&
-		    dev_priv->cfb_offset == obj_priv->gtt_offset &&
+		    dev_priv->cfb_offset == obj->gtt_offset &&
 		    dev_priv->cfb_y == crtc->y)
 			return;
 
@@ -1242,14 +1242,14 @@ static void ironlake_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 	}
 
 	dev_priv->cfb_pitch = (dev_priv->cfb_pitch / 64) - 1;
-	dev_priv->cfb_fence = obj_priv->fence_reg;
+	dev_priv->cfb_fence = obj->fence_reg;
 	dev_priv->cfb_plane = intel_crtc->plane;
-	dev_priv->cfb_offset = obj_priv->gtt_offset;
+	dev_priv->cfb_offset = obj->gtt_offset;
 	dev_priv->cfb_y = crtc->y;
 
 	dpfc_ctl &= DPFC_RESERVED;
 	dpfc_ctl |= (plane | DPFC_CTL_LIMIT_1X);
-	if (obj_priv->tiling_mode != I915_TILING_NONE) {
+	if (obj->tiling_mode != I915_TILING_NONE) {
 		dpfc_ctl |= (DPFC_CTL_FENCE_EN | dev_priv->cfb_fence);
 		I915_WRITE(ILK_DPFC_CHICKEN, DPFC_HT_MODIFY);
 	} else {
@@ -1260,7 +1260,7 @@ static void ironlake_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
 		   (stall_watermark << DPFC_RECOMP_STALL_WM_SHIFT) |
 		   (interval << DPFC_RECOMP_TIMER_COUNT_SHIFT));
 	I915_WRITE(ILK_DPFC_FENCE_YOFF, crtc->y);
-	I915_WRITE(ILK_FBC_RT_BASE, obj_priv->gtt_offset | ILK_FBC_RT_VALID);
+	I915_WRITE(ILK_FBC_RT_BASE, obj->gtt_offset | ILK_FBC_RT_VALID);
 	/* enable it... */
 	I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl | DPFC_CTL_EN);
 
@@ -1345,7 +1345,7 @@ static void intel_update_fbc(struct drm_device *dev)
 	struct intel_crtc *intel_crtc;
 	struct drm_framebuffer *fb;
 	struct intel_framebuffer *intel_fb;
-	struct drm_i915_gem_object *obj_priv;
+	struct drm_i915_gem_object *obj;
 
 	DRM_DEBUG_KMS("\n");
 
@@ -1384,9 +1384,9 @@ static void intel_update_fbc(struct drm_device *dev)
 	intel_crtc = to_intel_crtc(crtc);
 	fb = crtc->fb;
 	intel_fb = to_intel_framebuffer(fb);
-	obj_priv = to_intel_bo(intel_fb->obj);
+	obj = intel_fb->obj;
 
-	if (intel_fb->obj->size > dev_priv->cfb_size) {
+	if (intel_fb->obj->base.size > dev_priv->cfb_size) {
 		DRM_DEBUG_KMS("framebuffer too large, disabling "
 			      "compression\n");
 		dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL;
@@ -1410,7 +1410,7 @@ static void intel_update_fbc(struct drm_device *dev)
 		dev_priv->no_fbc_reason = FBC_BAD_PLANE;
 		goto out_disable;
 	}
-	if (obj_priv->tiling_mode != I915_TILING_X) {
+	if (obj->tiling_mode != I915_TILING_X) {
 		DRM_DEBUG_KMS("framebuffer not tiled, disabling compression\n");
 		dev_priv->no_fbc_reason = FBC_NOT_TILED;
 		goto out_disable;
@@ -1433,14 +1433,13 @@ out_disable:
 
 int
 intel_pin_and_fence_fb_obj(struct drm_device *dev,
-			   struct drm_gem_object *obj,
+			   struct drm_i915_gem_object *obj,
 			   bool pipelined)
 {
-	struct drm_i915_gem_object *obj_priv = to_intel_bo(obj);
 	u32 alignment;
 	int ret;
 
-	switch (obj_priv->tiling_mode) {
+	switch (obj->tiling_mode) {
 	case I915_TILING_NONE:
 		if (IS_BROADWATER(dev) || IS_CRESTLINE(dev))
 			alignment = 128 * 1024;
@@ -1474,7 +1473,7 @@ intel_pin_and_fence_fb_obj(struct drm_device *dev,
 	 * framebuffer compression. For simplicity, we always install
 	 * a fence as the cost is not that onerous.
 	 */
-	if (obj_priv->tiling_mode != I915_TILING_NONE) {
+	if (obj->tiling_mode != I915_TILING_NONE) {
 		ret = i915_gem_object_get_fence_reg(obj, false);
 		if (ret)
 			goto err_unpin;
@@ -1496,8 +1495,7 @@ intel_pipe_set_base_atomic(struct drm_crtc *crtc, struct drm_framebuffer *fb,
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 	struct intel_framebuffer *intel_fb;
-	struct drm_i915_gem_object *obj_priv;
-	struct drm_gem_object *obj;
+	struct drm_i915_gem_object *obj;
 	int plane = intel_crtc->plane;
 	unsigned long Start, Offset;
 	u32 dspcntr;
@@ -1514,7 +1512,6 @@ intel_pipe_set_base_atomic(struct drm_crtc *crtc, struct drm_framebuffer *fb,
 
 	intel_fb = to_intel_framebuffer(fb);
 	obj = intel_fb->obj;
-	obj_priv = to_intel_bo(obj);
 
 	reg = DSPCNTR(plane);
 	dspcntr = I915_READ(reg);
@@ -1539,7 +1536,7 @@ intel_pipe_set_base_atomic(struct drm_crtc *crtc, struct drm_framebuffer *fb,
 		return -EINVAL;
 	}
 	if (INTEL_INFO(dev)->gen >= 4) {
-		if (obj_priv->tiling_mode != I915_TILING_NONE)
+		if (obj->tiling_mode != I915_TILING_NONE)
 			dspcntr |= DISPPLANE_TILED;
 		else
 			dspcntr &= ~DISPPLANE_TILED;
@@ -1551,7 +1548,7 @@ intel_pipe_set_base_atomic(struct drm_crtc *crtc, struct drm_framebuffer *fb,
 
 	I915_WRITE(reg, dspcntr);
 
-	Start = obj_priv->gtt_offset;
+	Start = obj->gtt_offset;
 	Offset = y * fb->pitch + x * (fb->bits_per_pixel / 8);
 
 	DRM_DEBUG_KMS("Writing base %08lX %08lX %d %d %d\n",
@@ -1605,18 +1602,17 @@ intel_pipe_set_base(struct drm_crtc *crtc, int x, int y,
 
 	if (old_fb) {
 		struct drm_i915_private *dev_priv = dev->dev_private;
-		struct drm_gem_object *obj = to_intel_framebuffer(old_fb)->obj;
-		struct drm_i915_gem_object *obj_priv = to_intel_bo(obj);
+		struct drm_i915_gem_object *obj = to_intel_framebuffer(old_fb)->obj;
 
 		wait_event(dev_priv->pending_flip_queue,
-			   atomic_read(&obj_priv->pending_flip) == 0);
+			   atomic_read(&obj->pending_flip) == 0);
 
 		/* Big Hammer, we also need to ensure that any pending
 		 * MI_WAIT_FOR_EVENT inside a user batch buffer on the
 		 * current scanout is retired before unpinning the old
 		 * framebuffer.
 		 */
-		ret = i915_gem_object_flush_gpu(obj_priv, false);
+		ret = i915_gem_object_flush_gpu(obj, false);
 		if (ret) {
 			i915_gem_object_unpin(to_intel_framebuffer(crtc->fb)->obj);
 			mutex_unlock(&dev->struct_mutex);
@@ -2010,16 +2006,16 @@ static void intel_clear_scanline_wait(struct drm_device *dev)
 
 static void intel_crtc_wait_for_pending_flips(struct drm_crtc *crtc)
 {
-	struct drm_i915_gem_object *obj_priv;
+	struct drm_i915_gem_object *obj;
 	struct drm_i915_private *dev_priv;
 
 	if (crtc->fb == NULL)
 		return;
 
-	obj_priv = to_intel_bo(to_intel_framebuffer(crtc->fb)->obj);
+	obj = to_intel_framebuffer(crtc->fb)->obj;
 	dev_priv = crtc->dev->dev_private;
 	wait_event(dev_priv->pending_flip_queue,
-		   atomic_read(&obj_priv->pending_flip) == 0);
+		   atomic_read(&obj->pending_flip) == 0);
 }
 
 static void ironlake_crtc_enable(struct drm_crtc *crtc)
@@ -4333,15 +4329,14 @@ static void intel_crtc_update_cursor(struct drm_crtc *crtc,
 }
 
 static int intel_crtc_cursor_set(struct drm_crtc *crtc,
-				 struct drm_file *file_priv,
+				 struct drm_file *file,
 				 uint32_t handle,
 				 uint32_t width, uint32_t height)
 {
 	struct drm_device *dev = crtc->dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
-	struct drm_gem_object *bo;
-	struct drm_i915_gem_object *obj_priv;
+	struct drm_i915_gem_object *obj;
 	uint32_t addr;
 	int ret;
 
@@ -4351,7 +4346,7 @@ static int intel_crtc_cursor_set(struct drm_crtc *crtc,
 	if (!handle) {
 		DRM_DEBUG_KMS("cursor off\n");
 		addr = 0;
-		bo = NULL;
+		obj = NULL;
 		mutex_lock(&dev->struct_mutex);
 		goto finish;
 	}
@@ -4362,13 +4357,11 @@ static int intel_crtc_cursor_set(struct drm_crtc *crtc,
 		return -EINVAL;
 	}
 
-	bo = drm_gem_object_lookup(dev, file_priv, handle);
-	if (!bo)
+	obj = to_intel_bo(drm_gem_object_lookup(dev, file, handle));
+	if (!obj)
 		return -ENOENT;
 
-	obj_priv = to_intel_bo(bo);
-
-	if (bo->size < width * height * 4) {
+	if (obj->base.size < width * height * 4) {
 		DRM_ERROR("buffer is to small\n");
 		ret = -ENOMEM;
 		goto fail;
@@ -4377,29 +4370,29 @@ static int intel_crtc_cursor_set(struct drm_crtc *crtc,
 	/* we only need to pin inside GTT if cursor is non-phy */
 	mutex_lock(&dev->struct_mutex);
 	if (!dev_priv->info->cursor_needs_physical) {
-		ret = i915_gem_object_pin(bo, PAGE_SIZE, true);
+		ret = i915_gem_object_pin(obj, PAGE_SIZE, true);
 		if (ret) {
 			DRM_ERROR("failed to pin cursor bo\n");
 			goto fail_locked;
 		}
 
-		ret = i915_gem_object_set_to_gtt_domain(bo, 0);
+		ret = i915_gem_object_set_to_gtt_domain(obj, 0);
 		if (ret) {
 			DRM_ERROR("failed to move cursor bo into the GTT\n");
 			goto fail_unpin;
 		}
 
-		addr = obj_priv->gtt_offset;
+		addr = obj->gtt_offset;
 	} else {
 		int align = IS_I830(dev) ? 16 * 1024 : 256;
-		ret = i915_gem_attach_phys_object(dev, bo,
+		ret = i915_gem_attach_phys_object(dev, obj,
 						  (intel_crtc->pipe == 0) ? I915_GEM_PHYS_CURSOR_0 : I915_GEM_PHYS_CURSOR_1,
 						  align);
 		if (ret) {
 			DRM_ERROR("failed to attach phys object\n");
 			goto fail_locked;
 		}
-		addr = obj_priv->phys_obj->handle->busaddr;
+		addr = obj->phys_obj->handle->busaddr;
 	}
 
 	if (IS_GEN2(dev))
@@ -4408,17 +4401,17 @@ static int intel_crtc_cursor_set(struct drm_crtc *crtc,
  finish:
 	if (intel_crtc->cursor_bo) {
 		if (dev_priv->info->cursor_needs_physical) {
-			if (intel_crtc->cursor_bo != bo)
+			if (intel_crtc->cursor_bo != obj)
 				i915_gem_detach_phys_object(dev, intel_crtc->cursor_bo);
 		} else
 			i915_gem_object_unpin(intel_crtc->cursor_bo);
-		drm_gem_object_unreference(intel_crtc->cursor_bo);
+		drm_gem_object_unreference(&intel_crtc->cursor_bo->base);
 	}
 
 	mutex_unlock(&dev->struct_mutex);
 
 	intel_crtc->cursor_addr = addr;
-	intel_crtc->cursor_bo = bo;
+	intel_crtc->cursor_bo = obj;
 	intel_crtc->cursor_width = width;
 	intel_crtc->cursor_height = height;
 
@@ -4426,11 +4419,11 @@ static int intel_crtc_cursor_set(struct drm_crtc *crtc,
 
 	return 0;
 fail_unpin:
-	i915_gem_object_unpin(bo);
+	i915_gem_object_unpin(obj);
 fail_locked:
 	mutex_unlock(&dev->struct_mutex);
 fail:
-	drm_gem_object_unreference_unlocked(bo);
+	drm_gem_object_unreference_unlocked(&obj->base);
 	return ret;
 }
 
@@ -4890,7 +4883,7 @@ static void intel_idle_update(struct work_struct *work)
  * buffer), we'll also mark the display as busy, so we know to increase its
  * clock frequency.
  */
-void intel_mark_busy(struct drm_device *dev, struct drm_gem_object *obj)
+void intel_mark_busy(struct drm_device *dev, struct drm_i915_gem_object *obj)
 {
 	drm_i915_private_t *dev_priv = dev->dev_private;
 	struct drm_crtc *crtc = NULL;
@@ -4971,8 +4964,8 @@ static void intel_unpin_work_fn(struct work_struct *__work)
 
 	mutex_lock(&work->dev->struct_mutex);
 	i915_gem_object_unpin(work->old_fb_obj);
-	drm_gem_object_unreference(work->pending_flip_obj);
-	drm_gem_object_unreference(work->old_fb_obj);
+	drm_gem_object_unreference(&work->pending_flip_obj->base);
+	drm_gem_object_unreference(&work->old_fb_obj->base);
 	mutex_unlock(&work->dev->struct_mutex);
 	kfree(work);
 }
@@ -4983,7 +4976,7 @@ static void do_intel_finish_page_flip(struct drm_device *dev,
 	drm_i915_private_t *dev_priv = dev->dev_private;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 	struct intel_unpin_work *work;
-	struct drm_i915_gem_object *obj_priv;
+	struct drm_i915_gem_object *obj;
 	struct drm_pending_vblank_event *e;
 	struct timeval now;
 	unsigned long flags;
@@ -5015,10 +5008,10 @@ static void do_intel_finish_page_flip(struct drm_device *dev,
 
 	spin_unlock_irqrestore(&dev->event_lock, flags);
 
-	obj_priv = to_intel_bo(work->old_fb_obj);
+	obj = work->old_fb_obj;
 	atomic_clear_mask(1 << intel_crtc->plane,
-			  &obj_priv->pending_flip.counter);
-	if (atomic_read(&obj_priv->pending_flip) == 0)
+			  &obj->pending_flip.counter);
+	if (atomic_read(&obj->pending_flip) == 0)
 		wake_up(&dev_priv->pending_flip_queue);
 	schedule_work(&work->work);
 
@@ -5065,8 +5058,7 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 	struct drm_device *dev = crtc->dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	struct intel_framebuffer *intel_fb;
-	struct drm_i915_gem_object *obj_priv;
-	struct drm_gem_object *obj;
+	struct drm_i915_gem_object *obj;
 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 	struct intel_unpin_work *work;
 	unsigned long flags, offset;
@@ -5105,8 +5097,8 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 		goto cleanup_work;
 
 	/* Reference the objects for the scheduled work. */
-	drm_gem_object_reference(work->old_fb_obj);
-	drm_gem_object_reference(obj);
+	drm_gem_object_reference(&work->old_fb_obj->base);
+	drm_gem_object_reference(&obj->base);
 
 	crtc->fb = fb;
 
@@ -5134,7 +5126,6 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 	}
 
 	work->pending_flip_obj = obj;
-	obj_priv = to_intel_bo(obj);
 
 	work->enable_stall_check = true;
 
@@ -5148,15 +5139,14 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 	/* Block clients from rendering to the new back buffer until
 	 * the flip occurs and the object is no longer visible.
 	 */
-	atomic_add(1 << intel_crtc->plane,
-		   &to_intel_bo(work->old_fb_obj)->pending_flip);
+	atomic_add(1 << intel_crtc->plane, &work->old_fb_obj->pending_flip);
 
 	switch (INTEL_INFO(dev)->gen) {
 	case 2:
 		OUT_RING(MI_DISPLAY_FLIP |
 			 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
 		OUT_RING(fb->pitch);
-		OUT_RING(obj_priv->gtt_offset + offset);
+		OUT_RING(obj->gtt_offset + offset);
 		OUT_RING(MI_NOOP);
 		break;
 
@@ -5164,7 +5154,7 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 		OUT_RING(MI_DISPLAY_FLIP_I915 |
 			 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
 		OUT_RING(fb->pitch);
-		OUT_RING(obj_priv->gtt_offset + offset);
+		OUT_RING(obj->gtt_offset + offset);
 		OUT_RING(MI_NOOP);
 		break;
 
@@ -5177,7 +5167,7 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 		OUT_RING(MI_DISPLAY_FLIP |
 			 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
 		OUT_RING(fb->pitch);
-		OUT_RING(obj_priv->gtt_offset | obj_priv->tiling_mode);
+		OUT_RING(obj->gtt_offset | obj->tiling_mode);
 
 		/* XXX Enabling the panel-fitter across page-flip is so far
 		 * untested on non-native modes, so ignore it for now.
@@ -5191,8 +5181,8 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 	case 6:
 		OUT_RING(MI_DISPLAY_FLIP |
 			 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
-		OUT_RING(fb->pitch | obj_priv->tiling_mode);
-		OUT_RING(obj_priv->gtt_offset);
+		OUT_RING(fb->pitch | obj->tiling_mode);
+		OUT_RING(obj->gtt_offset);
 
 		pf = I915_READ(pipe == 0 ? PFA_CTL_1 : PFB_CTL_1) & PF_ENABLE;
 		pipesrc = I915_READ(pipe == 0 ? PIPEASRC : PIPEBSRC) & 0x0fff0fff;
@@ -5208,8 +5198,8 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
 	return 0;
 
 cleanup_objs:
-	drm_gem_object_unreference(work->old_fb_obj);
-	drm_gem_object_unreference(obj);
+	drm_gem_object_unreference(&work->old_fb_obj->base);
+	drm_gem_object_unreference(&obj->base);
 cleanup_work:
 	mutex_unlock(&dev->struct_mutex);
 
@@ -5295,7 +5285,7 @@ static void intel_crtc_init(struct drm_device *dev, int pipe)
 }
 
 int intel_get_pipe_from_crtc_id(struct drm_device *dev, void *data,
-				struct drm_file *file_priv)
+				struct drm_file *file)
 {
 	drm_i915_private_t *dev_priv = dev->dev_private;
 	struct drm_i915_get_pipe_from_crtc_id *pipe_from_crtc_id = data;
@@ -5440,19 +5430,19 @@ static void intel_user_framebuffer_destroy(struct drm_framebuffer *fb)
 	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
 
 	drm_framebuffer_cleanup(fb);
-	drm_gem_object_unreference_unlocked(intel_fb->obj);
+	drm_gem_object_unreference_unlocked(&intel_fb->obj->base);
 
 	kfree(intel_fb);
 }
 
 static int intel_user_framebuffer_create_handle(struct drm_framebuffer *fb,
-						struct drm_file *file_priv,
+						struct drm_file *file,
 						unsigned int *handle)
 {
 	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
-	struct drm_gem_object *object = intel_fb->obj;
+	struct drm_i915_gem_object *obj = intel_fb->obj;
 
-	return drm_gem_handle_create(file_priv, object, handle);
+	return drm_gem_handle_create(file, &obj->base, handle);
 }
 
 static const struct drm_framebuffer_funcs intel_fb_funcs = {
@@ -5463,12 +5453,11 @@ static const struct drm_framebuffer_funcs intel_fb_funcs = {
 int intel_framebuffer_init(struct drm_device *dev,
 			   struct intel_framebuffer *intel_fb,
 			   struct drm_mode_fb_cmd *mode_cmd,
-			   struct drm_gem_object *obj)
+			   struct drm_i915_gem_object *obj)
 {
-	struct drm_i915_gem_object *obj_priv = to_intel_bo(obj);
 	int ret;
 
-	if (obj_priv->tiling_mode == I915_TILING_Y)
+	if (obj->tiling_mode == I915_TILING_Y)
 		return -EINVAL;
 
 	if (mode_cmd->pitch & 63)
@@ -5500,11 +5489,11 @@ intel_user_framebuffer_create(struct drm_device *dev,
 			      struct drm_file *filp,
 			      struct drm_mode_fb_cmd *mode_cmd)
 {
-	struct drm_gem_object *obj;
+	struct drm_i915_gem_object *obj;
 	struct intel_framebuffer *intel_fb;
 	int ret;
 
-	obj = drm_gem_object_lookup(dev, filp, mode_cmd->handle);
+	obj = to_intel_bo(drm_gem_object_lookup(dev, filp, mode_cmd->handle));
 	if (!obj)
 		return ERR_PTR(-ENOENT);
 
@@ -5512,10 +5501,9 @@ intel_user_framebuffer_create(struct drm_device *dev,
 	if (!intel_fb)
 		return ERR_PTR(-ENOMEM);
 
-	ret = intel_framebuffer_init(dev, intel_fb,
-				     mode_cmd, obj);
+	ret = intel_framebuffer_init(dev, intel_fb, mode_cmd, obj);
 	if (ret) {
-		drm_gem_object_unreference_unlocked(obj);
+		drm_gem_object_unreference_unlocked(&obj->base);
 		kfree(intel_fb);
 		return ERR_PTR(ret);
 	}
@@ -5528,10 +5516,10 @@ static const struct drm_mode_config_funcs intel_mode_funcs = {
 	.output_poll_changed = intel_fb_output_poll_changed,
 };
 
-static struct drm_gem_object *
+static struct drm_i915_gem_object *
 intel_alloc_context_page(struct drm_device *dev)
 {
-	struct drm_gem_object *ctx;
+	struct drm_i915_gem_object *ctx;
 	int ret;
 
 	ctx = i915_gem_alloc_object(dev, 4096);
@@ -5559,7 +5547,7 @@ intel_alloc_context_page(struct drm_device *dev)
 err_unpin:
 	i915_gem_object_unpin(ctx);
 err_unref:
-	drm_gem_object_unreference(ctx);
+	drm_gem_object_unreference(&ctx->base);
 	mutex_unlock(&dev->struct_mutex);
 	return NULL;
 }
@@ -5886,20 +5874,17 @@ void intel_init_clock_gating(struct drm_device *dev)
 		if (dev_priv->renderctx == NULL)
 			dev_priv->renderctx = intel_alloc_context_page(dev);
 		if (dev_priv->renderctx) {
-			struct drm_i915_gem_object *obj_priv;
-			obj_priv = to_intel_bo(dev_priv->renderctx);
-			if (obj_priv) {
-				if (BEGIN_LP_RING(4) == 0) {
-					OUT_RING(MI_SET_CONTEXT);
-					OUT_RING(obj_priv->gtt_offset |
-						 MI_MM_SPACE_GTT |
-						 MI_SAVE_EXT_STATE_EN |
-						 MI_RESTORE_EXT_STATE_EN |
-						 MI_RESTORE_INHIBIT);
-					OUT_RING(MI_NOOP);
-					OUT_RING(MI_FLUSH);
-					ADVANCE_LP_RING();
-				}
+			struct drm_i915_gem_object *obj = dev_priv->renderctx;
+			if (BEGIN_LP_RING(4) == 0) {
+				OUT_RING(MI_SET_CONTEXT);
+				OUT_RING(obj->gtt_offset |
+					 MI_MM_SPACE_GTT |
+					 MI_SAVE_EXT_STATE_EN |
+					 MI_RESTORE_EXT_STATE_EN |
+					 MI_RESTORE_INHIBIT);
+				OUT_RING(MI_NOOP);
+				OUT_RING(MI_FLUSH);
+				ADVANCE_LP_RING();
 			}
 		} else
 			DRM_DEBUG_KMS("Failed to allocate render context."
@@ -5907,22 +5892,11 @@ void intel_init_clock_gating(struct drm_device *dev)
 	}
 
 	if (I915_HAS_RC6(dev) && drm_core_check_feature(dev, DRIVER_MODESET)) {
-		struct drm_i915_gem_object *obj_priv = NULL;
-
+		if (dev_priv->pwrctx == NULL)
+			dev_priv->pwrctx = intel_alloc_context_page(dev);
 		if (dev_priv->pwrctx) {
-			obj_priv = to_intel_bo(dev_priv->pwrctx);
-		} else {
-			struct drm_gem_object *pwrctx;
-
-			pwrctx = intel_alloc_context_page(dev);
-			if (pwrctx) {
-				dev_priv->pwrctx = pwrctx;
-				obj_priv = to_intel_bo(pwrctx);
-			}
-		}
-
-		if (obj_priv) {
-			I915_WRITE(PWRCTXA, obj_priv->gtt_offset | PWRCTX_EN);
+			struct drm_i915_gem_object *obj = dev_priv->pwrctx;
+			I915_WRITE(PWRCTXA, obj->gtt_offset | PWRCTX_EN);
 			I915_WRITE(MCHBAR_RENDER_STANDBY,
 				   I915_READ(MCHBAR_RENDER_STANDBY) & ~RCX_SW_EXIT);
 		}
@@ -6197,23 +6171,25 @@ void intel_modeset_cleanup(struct drm_device *dev)
 		dev_priv->display.disable_fbc(dev);
 
 	if (dev_priv->renderctx) {
-		struct drm_i915_gem_object *obj_priv;
+		struct drm_i915_gem_object *obj = dev_priv->renderctx;
+
+		I915_WRITE(CCID, obj->gtt_offset &~ CCID_EN);
+		POSTING_READ(CCID);
 
-		obj_priv = to_intel_bo(dev_priv->renderctx);
-		I915_WRITE(CCID, obj_priv->gtt_offset &~ CCID_EN);
-		I915_READ(CCID);
-		i915_gem_object_unpin(dev_priv->renderctx);
-		drm_gem_object_unreference(dev_priv->renderctx);
+		i915_gem_object_unpin(obj);
+		drm_gem_object_unreference(&obj->base);
+		dev_priv->renderctx = NULL;
 	}
 
 	if (dev_priv->pwrctx) {
-		struct drm_i915_gem_object *obj_priv;
+		struct drm_i915_gem_object *obj = dev_priv->pwrctx;
+
+		I915_WRITE(PWRCTXA, obj->gtt_offset &~ PWRCTX_EN);
+		POSTING_READ(PWRCTXA);
 
-		obj_priv = to_intel_bo(dev_priv->pwrctx);
-		I915_WRITE(PWRCTXA, obj_priv->gtt_offset &~ PWRCTX_EN);
-		I915_READ(PWRCTXA);
-		i915_gem_object_unpin(dev_priv->pwrctx);
-		drm_gem_object_unreference(dev_priv->pwrctx);
+		i915_gem_object_unpin(obj);
+		drm_gem_object_unreference(&obj->base);
+		dev_priv->pwrctx = NULL;
 	}
 
 	if (IS_IRONLAKE_M(dev))