author    Chris Wilson <chris@chris-wilson.co.uk>    2017-01-19 14:26:58 -0500
committer Chris Wilson <chris@chris-wilson.co.uk>    2017-01-21 05:32:27 -0500
commit    44a0ec0d3bd5ebbe2233b48cc53b5c79b66277b7 (patch)
tree      3dac5a68089709a8840acdc7d819e78eb35666bd
parent    e0216b762a7096639db395a22bdda7d1a7213c0f (diff)
drm/i915: Assert the drm_mm_node is allocated when on the VM lists
Before moving the vma between the VM active/inactive lists, assert that
the node is still allocated.

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Link: http://patchwork.freedesktop.org/patch/msgid/20170119192659.31789-5-chris@chris-wilson.co.uk
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
-rw-r--r--    drivers/gpu/drm/i915/i915_gem_stolen.c    2
-rw-r--r--    drivers/gpu/drm/i915/i915_vma.c           2
2 files changed, 4 insertions, 0 deletions
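
For reference, a minimal sketch of the assertion idiom this patch adds at each
site. The GEM_BUG_ON() and drm_mm_node_allocated() definitions below are
paraphrased from the i915 and drm headers of this period and may not match the
exact tree at this commit:

    /* drivers/gpu/drm/i915/i915_gem.h (paraphrased): checked only when the
     * kernel is built with CONFIG_DRM_I915_DEBUG_GEM, otherwise a no-op. */
    #ifdef CONFIG_DRM_I915_DEBUG_GEM
    #define GEM_BUG_ON(expr) BUG_ON(expr)
    #else
    #define GEM_BUG_ON(expr) do { } while (0)
    #endif

    /* include/drm/drm_mm.h (paraphrased): a node counts as allocated once it
     * has been inserted into a drm_mm address-space manager and not yet
     * removed. */
    static inline bool drm_mm_node_allocated(const struct drm_mm_node *node)
    {
    	return node->allocated;
    }

    /* The check added before each list move: refuse to place a vma on the
     * VM active/inactive lists unless its drm_mm node is still allocated. */
    GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));

With CONFIG_DRM_I915_DEBUG_GEM enabled, a vma reaching one of these list moves
with an unallocated node now trips the assertion immediately, rather than
leaving a stale entry on the VM lists to be debugged later.
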
diff --git a/drivers/gpu/drm/i915/i915_gem_stolen.c b/drivers/gpu/drm/i915/i915_gem_stolen.c
index 61cc0fcae3d8..127d698e7c84 100644
--- a/drivers/gpu/drm/i915/i915_gem_stolen.c
+++ b/drivers/gpu/drm/i915/i915_gem_stolen.c
@@ -702,6 +702,8 @@ i915_gem_object_create_stolen_for_preallocated(struct drm_i915_private *dev_priv
 		goto err_pages;
 	}
 
+	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
+
 	vma->pages = obj->mm.pages;
 	vma->flags |= I915_VMA_GLOBAL_BIND;
 	__i915_vma_set_map_and_fenceable(vma);
diff --git a/drivers/gpu/drm/i915/i915_vma.c b/drivers/gpu/drm/i915/i915_vma.c
index e58d8799bee2..ecb495b1c5d3 100644
--- a/drivers/gpu/drm/i915/i915_vma.c
+++ b/drivers/gpu/drm/i915/i915_vma.c
@@ -45,6 +45,7 @@ i915_vma_retire(struct i915_gem_active *active,
 	if (i915_vma_is_active(vma))
 		return;
 
+	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
 	list_move_tail(&vma->vm_link, &vma->vm->inactive_list);
 	if (unlikely(i915_vma_is_closed(vma) && !i915_vma_is_pinned(vma)))
 		WARN_ON(i915_vma_unbind(vma));
@@ -493,6 +494,7 @@ i915_vma_insert(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
 		GEM_BUG_ON(vma->node.start < start);
 		GEM_BUG_ON(vma->node.start + vma->node.size > end);
 	}
+	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
 	GEM_BUG_ON(!i915_gem_valid_gtt_space(vma, obj->cache_level));
 
 	list_move_tail(&obj->global_link, &dev_priv->mm.bound_list);