author    Ben Widawsky <ben@bwidawsk.net>    2013-08-14 05:38:35 -0400
committer Daniel Vetter <daniel.vetter@ffwll.ch>    2013-08-22 07:31:53 -0400
commit    accfef2e5a8f713bfa0c06696b5e10754686dc72 (patch)
tree      95c48d0b04366d1bcd35d269a464f7bf99b1ec03 /drivers/gpu/drm/i915/i915_gem.c
parent    82a55ad1a0585e4e01a47f72fe81fb5a2d2c0fb1 (diff)
drm/i915: prepare bind_to_vm for preallocated vma
In the new execbuf code we want to track buffers using the vmas even
before they're all properly mapped. This means that bind_to_vm needs to
deal with buffers which have preallocated vmas that aren't yet bound.
This patch implements this prep work and adjusts our WARN/BUG checks.

Signed-off-by: Ben Widawsky <ben@bwidawsk.net>
[danvet: Split out from Ben's big execbuf patch. Also move one BUG back
to its original place to deflate the diff a notch.]
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
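To illustrate the flow this prepares for, here is a minimal sketch (not
part of the patch; reserve_vma is a hypothetical caller, written under
the assumption that an execbuf-style path wants a vma in hand before the
object is bound):

	/* Illustrative only: grab (or create) the vma up front,
	 * before any binding has happened. */
	static int reserve_vma(struct drm_i915_gem_object *obj,
			       struct i915_address_space *vm)
	{
		struct i915_vma *vma;

		vma = i915_gem_obj_lookup_or_create_vma(obj, vm);
		if (IS_ERR(vma))
			return PTR_ERR(vma);

		/*
		 * The vma now sits on obj->vma_list but has no GTT node
		 * yet; a later i915_gem_object_bind_to_vm() looks up this
		 * same vma instead of WARNing about a non-empty list.
		 */
		WARN_ON(drm_mm_node_allocated(&vma->node));
		return 0;
	}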
Diffstat (limited to 'drivers/gpu/drm/i915/i915_gem.c')
-rw-r--r--    drivers/gpu/drm/i915/i915_gem.c    23
1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index ca29055ae206..449575b85b31 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -3124,9 +3124,6 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 	struct i915_vma *vma;
 	int ret;
 
-	if (WARN_ON(!list_empty(&obj->vma_list)))
-		return -EBUSY;
-
 	fence_size = i915_gem_get_gtt_size(dev,
 					   obj->base.size,
 					   obj->tiling_mode);
@@ -3165,16 +3162,17 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 
 	i915_gem_object_pin_pages(obj);
 
-	/* FIXME: For now we only ever use 1 VMA per object */
 	BUG_ON(!i915_is_ggtt(vm));
-	WARN_ON(!list_empty(&obj->vma_list));
 
-	vma = i915_gem_vma_create(obj, vm);
+	vma = i915_gem_obj_lookup_or_create_vma(obj, vm);
 	if (IS_ERR(vma)) {
 		ret = PTR_ERR(vma);
 		goto err_unpin;
 	}
 
+	/* For now we only ever use 1 vma per object */
+	WARN_ON(!list_is_singular(&obj->vma_list));
+
 search_free:
 	ret = drm_mm_insert_node_in_range_generic(&vm->mm, &vma->node,
 						  size, alignment,
@@ -4882,3 +4880,16 @@ struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
 
 	return NULL;
 }
+
+struct i915_vma *
+i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj,
+				  struct i915_address_space *vm)
+{
+	struct i915_vma *vma;
+
+	vma = i915_gem_obj_to_vma(obj, vm);
+	if (!vma)
+		vma = i915_gem_vma_create(obj, vm);
+
+	return vma;
+}
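A note on the new helper's semantics: the lookup-or-create pattern is
idempotent, so repeated calls for the same (obj, vm) pair hand back the
same vma, which is what lets bind_to_vm run against a buffer whose vma
was preallocated earlier. A sketch of that property (hypothetical check,
not in the patch):

	struct i915_vma *a = i915_gem_obj_lookup_or_create_vma(obj, vm);
	struct i915_vma *b = i915_gem_obj_lookup_or_create_vma(obj, vm);
	/* The second call finds the vma created by the first, so no
	 * duplicate lands on obj->vma_list; hence the switch from
	 * WARN_ON(!list_empty()) to WARN_ON(!list_is_singular()). */
	if (!IS_ERR(a) && !IS_ERR(b))
		WARN_ON(a != b);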