Diffstat (limited to 'drivers/gpu/drm/i915/i915_gem.c')
 drivers/gpu/drm/i915/i915_gem.c | 50 +++++++++++++++++++++++++-------------------------
 1 file changed, 25 insertions(+), 25 deletions(-)
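This patch mechanically renames the two list_head members of struct i915_vma so that each link is named after the list that owns it: vma->vma_link becomes vma->obj_link (the entry in obj->vma_list) and vma->mm_list becomes vma->vm_link (the entry in the address space's active/inactive lists). As a minimal sketch of that relationship, using trimmed-down stand-ins for the real driver structs (only the members relevant to the rename are shown; the member names themselves match the ones used in the hunks below):

/* Sketch only: simplified stand-ins for the i915 structs, illustrating
 * which list each renamed member links into.
 */
#include <linux/list.h>

struct i915_address_space {
	struct list_head active_list;	/* vmas on this vm, via vma->vm_link */
	struct list_head inactive_list;
};

struct drm_i915_gem_object {
	struct list_head vma_list;	/* vmas of this object, via vma->obj_link */
};

struct i915_vma {
	struct list_head obj_link;	/* was vma_link: entry in obj->vma_list */
	struct list_head vm_link;	/* was mm_list: entry in vm->*_list */
	struct i915_address_space *vm;
	struct drm_i915_gem_object *obj;
};

/* Typical traversal, matching the loops updated below: walk every vma
 * belonging to one object via the object-side link.
 */
static inline unsigned int sketch_count_vmas(struct drm_i915_gem_object *obj)
{
	struct i915_vma *vma;
	unsigned int count = 0;

	list_for_each_entry(vma, &obj->vma_list, obj_link)
		count++;
	return count;
}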
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index f68f34606f2f..bcd2e481d014 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -138,10 +138,10 @@ i915_gem_get_aperture_ioctl(struct drm_device *dev, void *data,
 
 	pinned = 0;
 	mutex_lock(&dev->struct_mutex);
-	list_for_each_entry(vma, &ggtt->base.active_list, mm_list)
+	list_for_each_entry(vma, &ggtt->base.active_list, vm_link)
 		if (vma->pin_count)
 			pinned += vma->node.size;
-	list_for_each_entry(vma, &ggtt->base.inactive_list, mm_list)
+	list_for_each_entry(vma, &ggtt->base.inactive_list, vm_link)
 		if (vma->pin_count)
 			pinned += vma->node.size;
 	mutex_unlock(&dev->struct_mutex);
@@ -272,7 +272,7 @@ drop_pages(struct drm_i915_gem_object *obj)
 	int ret;
 
 	drm_gem_object_reference(&obj->base);
-	list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link)
+	list_for_each_entry_safe(vma, next, &obj->vma_list, obj_link)
 		if (i915_vma_unbind(vma))
 			break;
 
@@ -2416,7 +2416,7 @@ void i915_vma_move_to_active(struct i915_vma *vma,
 	list_move_tail(&obj->ring_list[ring->id], &ring->active_list);
 	i915_gem_request_assign(&obj->last_read_req[ring->id], req);
 
-	list_move_tail(&vma->mm_list, &vma->vm->active_list);
+	list_move_tail(&vma->vm_link, &vma->vm->active_list);
 }
 
 static void
@@ -2454,9 +2454,9 @@ i915_gem_object_retire__read(struct drm_i915_gem_object *obj, int ring)
 	list_move_tail(&obj->global_list,
 		       &to_i915(obj->base.dev)->mm.bound_list);
 
-	list_for_each_entry(vma, &obj->vma_list, vma_link) {
-		if (!list_empty(&vma->mm_list))
-			list_move_tail(&vma->mm_list, &vma->vm->inactive_list);
+	list_for_each_entry(vma, &obj->vma_list, obj_link) {
+		if (!list_empty(&vma->vm_link))
+			list_move_tail(&vma->vm_link, &vma->vm->inactive_list);
 	}
 
 	i915_gem_request_assign(&obj->last_fenced_req, NULL);
@@ -3317,7 +3317,7 @@ static int __i915_vma_unbind(struct i915_vma *vma, bool wait)
 	struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
 	int ret;
 
-	if (list_empty(&vma->vma_link))
+	if (list_empty(&vma->obj_link))
 		return 0;
 
 	if (!drm_mm_node_allocated(&vma->node)) {
@@ -3351,7 +3351,7 @@ static int __i915_vma_unbind(struct i915_vma *vma, bool wait)
 	vma->vm->unbind_vma(vma);
 	vma->bound = 0;
 
-	list_del_init(&vma->mm_list);
+	list_del_init(&vma->vm_link);
 	if (i915_is_ggtt(vma->vm)) {
 		if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
 			obj->map_and_fenceable = false;
@@ -3609,7 +3609,7 @@ search_free:
 		goto err_remove_node;
 
 	list_move_tail(&obj->global_list, &dev_priv->mm.bound_list);
-	list_add_tail(&vma->mm_list, &vm->inactive_list);
+	list_add_tail(&vma->vm_link, &vm->inactive_list);
 
 	return vma;
 
@@ -3774,7 +3774,7 @@ i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write)
 	/* And bump the LRU for this access */
 	vma = i915_gem_obj_to_ggtt(obj);
 	if (vma && drm_mm_node_allocated(&vma->node) && !obj->active)
-		list_move_tail(&vma->mm_list,
+		list_move_tail(&vma->vm_link,
 			       &to_i915(obj->base.dev)->gtt.base.inactive_list);
 
 	return 0;
@@ -3809,7 +3809,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 	 * catch the issue of the CS prefetch crossing page boundaries and
 	 * reading an invalid PTE on older architectures.
 	 */
-	list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) {
+	list_for_each_entry_safe(vma, next, &obj->vma_list, obj_link) {
 		if (!drm_mm_node_allocated(&vma->node))
 			continue;
 
@@ -3872,7 +3872,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 		 */
 	}
 
-	list_for_each_entry(vma, &obj->vma_list, vma_link) {
+	list_for_each_entry(vma, &obj->vma_list, obj_link) {
 		if (!drm_mm_node_allocated(&vma->node))
 			continue;
 
@@ -3882,7 +3882,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 		}
 	}
 
-	list_for_each_entry(vma, &obj->vma_list, vma_link)
+	list_for_each_entry(vma, &obj->vma_list, obj_link)
 		vma->node.color = cache_level;
 	obj->cache_level = cache_level;
 
@@ -4556,7 +4556,7 @@ void i915_gem_free_object(struct drm_gem_object *gem_obj)
 
 	trace_i915_gem_object_destroy(obj);
 
-	list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) {
+	list_for_each_entry_safe(vma, next, &obj->vma_list, obj_link) {
 		int ret;
 
 		vma->pin_count = 0;
@@ -4613,7 +4613,7 @@ struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
 				     struct i915_address_space *vm)
 {
 	struct i915_vma *vma;
-	list_for_each_entry(vma, &obj->vma_list, vma_link) {
+	list_for_each_entry(vma, &obj->vma_list, obj_link) {
 		if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL &&
 		    vma->vm == vm)
 			return vma;
@@ -4630,7 +4630,7 @@ struct i915_vma *i915_gem_obj_to_ggtt_view(struct drm_i915_gem_object *obj,
 	if (WARN_ONCE(!view, "no view specified"))
 		return ERR_PTR(-EINVAL);
 
-	list_for_each_entry(vma, &obj->vma_list, vma_link)
+	list_for_each_entry(vma, &obj->vma_list, obj_link)
 		if (vma->vm == ggtt &&
 		    i915_ggtt_view_equal(&vma->ggtt_view, view))
 			return vma;
@@ -4651,7 +4651,7 @@ void i915_gem_vma_destroy(struct i915_vma *vma)
 	if (!i915_is_ggtt(vm))
 		i915_ppgtt_put(i915_vm_to_ppgtt(vm));
 
-	list_del(&vma->vma_link);
+	list_del(&vma->obj_link);
 
 	kmem_cache_free(to_i915(vma->obj->base.dev)->vmas, vma);
 }
@@ -5201,7 +5201,7 @@ u64 i915_gem_obj_offset(struct drm_i915_gem_object *o,
 
 	WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base);
 
-	list_for_each_entry(vma, &o->vma_list, vma_link) {
+	list_for_each_entry(vma, &o->vma_list, obj_link) {
 		if (i915_is_ggtt(vma->vm) &&
 		    vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL)
 			continue;
@@ -5220,7 +5220,7 @@ u64 i915_gem_obj_ggtt_offset_view(struct drm_i915_gem_object *o,
 	struct i915_address_space *ggtt = i915_obj_to_ggtt(o);
 	struct i915_vma *vma;
 
-	list_for_each_entry(vma, &o->vma_list, vma_link)
+	list_for_each_entry(vma, &o->vma_list, obj_link)
 		if (vma->vm == ggtt &&
 		    i915_ggtt_view_equal(&vma->ggtt_view, view))
 			return vma->node.start;
@@ -5234,7 +5234,7 @@ bool i915_gem_obj_bound(struct drm_i915_gem_object *o,
 {
 	struct i915_vma *vma;
 
-	list_for_each_entry(vma, &o->vma_list, vma_link) {
+	list_for_each_entry(vma, &o->vma_list, obj_link) {
 		if (i915_is_ggtt(vma->vm) &&
 		    vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL)
 			continue;
@@ -5251,7 +5251,7 @@ bool i915_gem_obj_ggtt_bound_view(struct drm_i915_gem_object *o,
 	struct i915_address_space *ggtt = i915_obj_to_ggtt(o);
 	struct i915_vma *vma;
 
-	list_for_each_entry(vma, &o->vma_list, vma_link)
+	list_for_each_entry(vma, &o->vma_list, obj_link)
 		if (vma->vm == ggtt &&
 		    i915_ggtt_view_equal(&vma->ggtt_view, view) &&
 		    drm_mm_node_allocated(&vma->node))
@@ -5264,7 +5264,7 @@ bool i915_gem_obj_bound_any(struct drm_i915_gem_object *o)
 {
 	struct i915_vma *vma;
 
-	list_for_each_entry(vma, &o->vma_list, vma_link)
+	list_for_each_entry(vma, &o->vma_list, obj_link)
 		if (drm_mm_node_allocated(&vma->node))
 			return true;
 
@@ -5281,7 +5281,7 @@ unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o,
 
 	BUG_ON(list_empty(&o->vma_list));
 
-	list_for_each_entry(vma, &o->vma_list, vma_link) {
+	list_for_each_entry(vma, &o->vma_list, obj_link) {
 		if (i915_is_ggtt(vma->vm) &&
 		    vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL)
 			continue;
@@ -5294,7 +5294,7 @@ unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o,
 bool i915_gem_obj_is_pinned(struct drm_i915_gem_object *obj)
 {
 	struct i915_vma *vma;
-	list_for_each_entry(vma, &obj->vma_list, vma_link)
+	list_for_each_entry(vma, &obj->vma_list, obj_link)
 		if (vma->pin_count > 0)
 			return true;
 
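The vm-side link follows the same pattern: the first hunk above walks a vm's active and inactive lists via vma->vm_link to sum pinned space. A self-contained sketch of that loop, with sketch-only stand-in structs (pin_count and node.size mirror the fields the real loop reads; the names are taken from the hunk, the struct layout is assumed):

#include <linux/list.h>
#include <linux/types.h>

struct sketch_node {
	u64 size;
};

struct sketch_vm {
	struct list_head active_list;
	struct list_head inactive_list;
};

struct sketch_vma {
	struct list_head vm_link;	/* was mm_list */
	unsigned int pin_count;
	struct sketch_node node;
};

/* Sum the sizes of all pinned vmas in one address space, as
 * i915_gem_get_aperture_ioctl() does after this rename.
 */
static u64 sketch_count_pinned(struct sketch_vm *vm)
{
	struct sketch_vma *vma;
	u64 pinned = 0;

	list_for_each_entry(vma, &vm->active_list, vm_link)
		if (vma->pin_count)
			pinned += vma->node.size;
	list_for_each_entry(vma, &vm->inactive_list, vm_link)
		if (vma->pin_count)
			pinned += vma->node.size;
	return pinned;
}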