author     Matthew Auld <matthew.auld@intel.com>    2017-05-12 05:14:23 -0400
committer  Jani Nikula <jani.nikula@intel.com>      2017-05-15 07:44:33 -0400
commit     2f720aac936dc7a301b757d3b197d86c333d59b8 (patch)
tree       df469f9fe4c6296eae84d2ec5ed7c85f06f73ab1
parent     82f2b4aca8fd90476fc3fd1786d107163ab17201 (diff)
drm/i915: don't do allocate_va_range again on PIN_UPDATE
If a vma is already bound to a ppgtt, we incorrectly call allocate_va_range
again when doing a PIN_UPDATE, which will result in over accounting within
our paging structures, such that when we do unbind something we don't
actually destroy the structures and end up inadvertently recycling them. In
reality this probably isn't too bad, but once we start touching PDEs and
PDPEs for 64K/2M/1G pages this apparent recycling will manifest into lots
of really, really subtle bugs.

v2: Fix the testing of vma->flags for aliasing_ppgtt_bind_vma

Fixes: ff685975d97f ("drm/i915: Move allocate_va_range to GTT")
Signed-off-by: Matthew Auld <matthew.auld@intel.com>
Cc: Chris Wilson <chris@chris-wilson.co.uk>
Cc: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Reviewed-by: Chris Wilson <chris@chris-wilson.co.uk>
Link: http://patchwork.freedesktop.org/patch/msgid/20170512091423.26085-1-chris@chris-wilson.co.uk
(cherry picked from commit 1f23475c893a85c934143cd64865ebb9b6af383f)
Signed-off-by: Jani Nikula <jani.nikula@intel.com>
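To illustrate the over-accounting described above, here is a minimal,
self-contained C sketch. It is not the i915 code: pd_entry, use_count,
bind_range() and unbind_range() are invented stand-ins for the real paging
structures and the allocate_va_range()/teardown paths. The point is only to
show how allocating the same range a second time (the buggy PIN_UPDATE path)
leaves a stale use count behind, so the backing structure survives the
unbind and is silently recycled; the "already bound" guard mirrors the
!(vma->flags & I915_VMA_LOCAL_BIND) check in the patch below.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-in for one paging structure (e.g. a PDE slot). */
struct pd_entry {
	bool allocated;
	int use_count;	/* how many bindings believe they own this entry */
};

/* Allocate (or reuse) the structure backing a VA range; bumps the count. */
static void bind_range(struct pd_entry *pde)
{
	pde->allocated = true;
	pde->use_count++;
}

/* Unbind drops one reference; the structure is only torn down at zero. */
static void unbind_range(struct pd_entry *pde)
{
	if (--pde->use_count == 0)
		pde->allocated = false;
}

int main(void)
{
	struct pd_entry pde = { 0 };
	bool already_bound;

	bind_range(&pde);		/* first bind: use_count == 1 */
	already_bound = true;

	/* Buggy PIN_UPDATE path: allocate again although already bound. */
	bind_range(&pde);		/* use_count == 2: over-accounted */

	/* Fixed PIN_UPDATE path: skip allocation when already bound. */
	if (!already_bound)
		bind_range(&pde);	/* not reached */

	/*
	 * A single unbind should free the structure, but the stale extra
	 * reference from the buggy path keeps it alive to be recycled.
	 */
	unbind_range(&pde);
	printf("allocated after unbind: %s (use_count=%d)\n",
	       pde.allocated ? "yes, stale" : "no", pde.use_count);
	return 0;
}

The actual fix needs no extra bookkeeping of this kind: vma->flags already
carries I915_VMA_LOCAL_BIND once the vma has been bound into the ppgtt, so a
PIN_UPDATE rebind can be told apart from a first bind by that flag alone.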
 drivers/gpu/drm/i915/i915_gem_gtt.c | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c
index 2aa6b97fd22f..a0563e18d753 100644
--- a/drivers/gpu/drm/i915/i915_gem_gtt.c
+++ b/drivers/gpu/drm/i915/i915_gem_gtt.c
@@ -195,9 +195,12 @@ static int ppgtt_bind_vma(struct i915_vma *vma,
 	u32 pte_flags;
 	int ret;
 
-	ret = vma->vm->allocate_va_range(vma->vm, vma->node.start, vma->size);
-	if (ret)
-		return ret;
+	if (!(vma->flags & I915_VMA_LOCAL_BIND)) {
+		ret = vma->vm->allocate_va_range(vma->vm, vma->node.start,
+						 vma->size);
+		if (ret)
+			return ret;
+	}
 
 	vma->pages = vma->obj->mm.pages;
 
@@ -2306,7 +2309,8 @@ static int aliasing_gtt_bind_vma(struct i915_vma *vma,
 	if (flags & I915_VMA_LOCAL_BIND) {
 		struct i915_hw_ppgtt *appgtt = i915->mm.aliasing_ppgtt;
 
-		if (appgtt->base.allocate_va_range) {
+		if (!(vma->flags & I915_VMA_LOCAL_BIND) &&
+		    appgtt->base.allocate_va_range) {
 			ret = appgtt->base.allocate_va_range(&appgtt->base,
 							     vma->node.start,
 							     vma->node.size);