Diffstat (limited to 'drivers/gpu/drm/i915/i915_gem.c')
 -rw-r--r--  drivers/gpu/drm/i915/i915_gem.c | 70
 1 file changed, 34 insertions(+), 36 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index c4c2855d002d..7ce3f353af33 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -224,7 +224,7 @@ i915_gem_dumb_create(struct drm_file *file,
 		     struct drm_mode_create_dumb *args)
 {
 	/* have to work out size/pitch and return them */
-	args->pitch = ALIGN(args->width & ((args->bpp + 1) / 8), 64);
+	args->pitch = ALIGN(args->width * ((args->bpp + 7) / 8), 64);
 	args->size = args->pitch * args->height;
 	return i915_gem_create(file, dev,
 			       args->size, &args->handle);
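
Note: the fix replaces a bitwise AND with a multiplication and rounds the bits-per-pixel value up to whole bytes with (bpp + 7) / 8, so e.g. a 24 bpp buffer contributes 3 bytes per pixel rather than masking the width. A minimal userspace sketch of the same arithmetic follows; the ALIGN macro is re-defined here purely for illustration and the width/height/bpp values are arbitrary, not taken from the driver.

#include <stdint.h>
#include <stdio.h>

/* Round x up to the next multiple of a (a power of two), mirroring the
 * kernel's ALIGN() for this example only. */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((uint32_t)((a) - 1)))

int main(void)
{
	uint32_t width = 1366, height = 768, bpp = 24;

	/* (bpp + 7) / 8 rounds bits up to whole bytes: 24 -> 3, 15 -> 2.
	 * The old expression masked the width with (bpp + 1) / 8 instead
	 * of multiplying, producing a bogus pitch. */
	uint32_t cpp = (bpp + 7) / 8;
	uint32_t pitch = ALIGN(width * cpp, 64);	/* 4098 -> 4160 */
	uint32_t size = pitch * height;

	printf("cpp=%u pitch=%u size=%u\n", cpp, pitch, size);
	return 0;
}
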
@@ -1356,9 +1356,10 @@ i915_gem_release_mmap(struct drm_i915_gem_object *obj)
 	if (!obj->fault_mappable)
 		return;
 
-	unmap_mapping_range(obj->base.dev->dev_mapping,
-			    (loff_t)obj->base.map_list.hash.key<<PAGE_SHIFT,
-			    obj->base.size, 1);
+	if (obj->base.dev->dev_mapping)
+		unmap_mapping_range(obj->base.dev->dev_mapping,
+				    (loff_t)obj->base.map_list.hash.key<<PAGE_SHIFT,
+				    obj->base.size, 1);
 
 	obj->fault_mappable = false;
 }
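
Note: the release path now checks obj->base.dev->dev_mapping before calling unmap_mapping_range(), so a fault-mappable object released while the device has no address_space attached (dev_mapping is typically only set up when the device node is first opened) no longer passes a NULL mapping into the unmap call.
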
@@ -1796,8 +1797,10 @@ i915_gem_request_remove_from_client(struct drm_i915_gem_request *request)
 		return;
 
 	spin_lock(&file_priv->mm.lock);
-	list_del(&request->client_list);
-	request->file_priv = NULL;
+	if (request->file_priv) {
+		list_del(&request->client_list);
+		request->file_priv = NULL;
+	}
 	spin_unlock(&file_priv->mm.lock);
 }
 
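Note: the list_del() and the file_priv reset now happen only if request->file_priv is still set once the lock is held, since the earlier unlocked test can race with another path clearing the association. A minimal sketch of this check-under-lock pattern, using pthreads purely for illustration (the struct and function names here are made up, not the driver's):

#include <pthread.h>
#include <stddef.h>

struct owner { pthread_mutex_t lock; };
struct item  { struct owner *owner; };

static void detach(struct item *it)
{
	struct owner *o = it->owner;	/* unlocked snapshot */

	if (o == NULL)
		return;

	pthread_mutex_lock(&o->lock);
	if (it->owner) {		/* re-check: may have been cleared meanwhile */
		/* ... unlink it from the owner's list ... */
		it->owner = NULL;
	}
	pthread_mutex_unlock(&o->lock);
}

int main(void)
{
	struct owner o = { .lock = PTHREAD_MUTEX_INITIALIZER };
	struct item it = { .owner = &o };

	detach(&it);
	return it.owner != NULL;	/* 0 on success */
}
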
@@ -2217,13 +2220,18 @@ i915_gem_flush_ring(struct intel_ring_buffer *ring,
 {
 	int ret;
 
+	if (((invalidate_domains | flush_domains) & I915_GEM_GPU_DOMAINS) == 0)
+		return 0;
+
 	trace_i915_gem_ring_flush(ring, invalidate_domains, flush_domains);
 
 	ret = ring->flush(ring, invalidate_domains, flush_domains);
 	if (ret)
 		return ret;
 
-	i915_gem_process_flushing_list(ring, flush_domains);
+	if (flush_domains & I915_GEM_GPU_DOMAINS)
+		i915_gem_process_flushing_list(ring, flush_domains);
+
 	return 0;
 }
 
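Note: two guards are added here: the ring flush is skipped entirely when neither mask touches a GPU domain, and the flushing list is only walked when GPU write domains were actually flushed. The test itself is plain bitmask arithmetic, sketched below with a placeholder domain mask; the real I915_GEM_GPU_DOMAINS definition lives in the i915 uapi header and is not reproduced here.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder mask for the GPU-visible domains; illustrative only. */
#define GPU_DOMAINS	0x3eu

static bool needs_gpu_flush(uint32_t invalidate, uint32_t flush)
{
	/* One OR and one AND decide whether the ring has anything to do. */
	return ((invalidate | flush) & GPU_DOMAINS) != 0;
}

int main(void)
{
	printf("%d\n", needs_gpu_flush(0x40 /* GTT only */, 0));	/* 0 */
	printf("%d\n", needs_gpu_flush(0, 0x02 /* render */));		/* 1 */
	return 0;
}
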
@@ -2579,8 +2587,23 @@ i915_gem_object_get_fence(struct drm_i915_gem_object *obj,
 		reg = &dev_priv->fence_regs[obj->fence_reg];
 		list_move_tail(&reg->lru_list, &dev_priv->mm.fence_list);
 
-		if (!obj->fenced_gpu_access && !obj->last_fenced_seqno)
-			pipelined = NULL;
+		if (obj->tiling_changed) {
+			ret = i915_gem_object_flush_fence(obj, pipelined);
+			if (ret)
+				return ret;
+
+			if (!obj->fenced_gpu_access && !obj->last_fenced_seqno)
+				pipelined = NULL;
+
+			if (pipelined) {
+				reg->setup_seqno =
+					i915_gem_next_request_seqno(pipelined);
+				obj->last_fenced_seqno = reg->setup_seqno;
+				obj->last_fenced_ring = pipelined;
+			}
+
+			goto update;
+		}
 
 		if (!pipelined) {
 			if (reg->setup_seqno) {
@@ -2599,31 +2622,6 @@ i915_gem_object_get_fence(struct drm_i915_gem_object *obj,
 			ret = i915_gem_object_flush_fence(obj, pipelined);
 			if (ret)
 				return ret;
-		} else if (obj->tiling_changed) {
-			if (obj->fenced_gpu_access) {
-				if (obj->base.write_domain & I915_GEM_GPU_DOMAINS) {
-					ret = i915_gem_flush_ring(obj->ring,
-								  0, obj->base.write_domain);
-					if (ret)
-						return ret;
-				}
-
-				obj->fenced_gpu_access = false;
-			}
-		}
-
-		if (!obj->fenced_gpu_access && !obj->last_fenced_seqno)
-			pipelined = NULL;
-		BUG_ON(!pipelined && reg->setup_seqno);
-
-		if (obj->tiling_changed) {
-			if (pipelined) {
-				reg->setup_seqno =
-					i915_gem_next_request_seqno(pipelined);
-				obj->last_fenced_seqno = reg->setup_seqno;
-				obj->last_fenced_ring = pipelined;
-			}
-			goto update;
-		}
 		}
 
 		return 0;
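
Note: taken together with the hunk above, the tiling_changed handling is hoisted to the top of i915_gem_object_get_fence(). A tiling change now always flushes the old fence, optionally records a pipelined setup seqno, and jumps straight to the update label, instead of being split across the BUG_ON and the two separate tiling_changed tests deleted here.
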
@@ -3606,6 +3604,8 @@ static void i915_gem_free_object_tail(struct drm_i915_gem_object *obj)
 		return;
 	}
 
+	trace_i915_gem_object_destroy(obj);
+
 	if (obj->base.map_list.map)
 		i915_gem_free_mmap_offset(obj);
 
@@ -3615,8 +3615,6 @@ static void i915_gem_free_object_tail(struct drm_i915_gem_object *obj)
 	kfree(obj->page_cpu_valid);
 	kfree(obj->bit_17);
 	kfree(obj);
-
-	trace_i915_gem_object_destroy(obj);
 }
 
 void i915_gem_free_object(struct drm_gem_object *gem_obj)
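
Note: the trace_i915_gem_object_destroy() call is moved from after kfree(obj) (the lines removed in this hunk) to before the object is torn down, so the tracepoint is no longer invoked on an object that has already been freed.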