author    Tvrtko Ursulin <tvrtko.ursulin@intel.com>  2014-12-10 12:27:58 -0500
committer Daniel Vetter <daniel.vetter@ffwll.ch>     2014-12-15 05:25:04 -0500
commit    fe14d5f4e5468c5b80a24f1a64abcbe116143670 (patch)
tree      0b685fdf444fe53cfb010ca30e439fcb39a4e29d /drivers
parent    db5ff4ac97f6602360645414b698a05f91b40542 (diff)
drm/i915: Infrastructure for supporting different GGTT views per object
Things like reliable GGTT mappings and mirrored 2d-on-3d display will need
to map objects into the same address space multiple times.

Added a GGTT view concept and linked it with the VMA to distinguish between
multiple instances per address space.

New objects and GEM functions which do not take this new view as a parameter
assume the default of zero (I915_GGTT_VIEW_NORMAL) which preserves the
previous behaviour.

This now means that objects can have multiple VMA entries so the code which
assumed there will only be one also had to be modified.

Alternative GGTT views are supposed to borrow DMA addresses from obj->pages
which is DMA mapped on first VMA instantiation and unmapped on the last one
going away.

v2:
    * Removed per view special casing in i915_gem_ggtt_prepare /
      finish_object in favour of creating and destroying DMA mappings
      on first VMA instantiation and last VMA destruction. (Daniel Vetter)
    * Simplified i915_vma_unbind which does not need to count the GGTT views.
      (Daniel Vetter)
    * Also moved obj->map_and_fenceable reset under the same check.
    * Checkpatch cleanups.

v3:
    * Only retire objects once the last VMA is unbound.

v4:
    * Keep scatter-gather table for alternative views persistent for the
      lifetime of the VMA.
    * Propagate binding errors to callers and handle appropriately.

v5:
    * Explicitly look for normal GGTT view in i915_gem_obj_bound to align
      usage in i915_gem_object_ggtt_unpin. (Michel Thierry)
    * Change to single if statement in i915_gem_obj_to_ggtt. (Michel Thierry)
    * Removed stray semi-colon in i915_gem_object_set_cache_level.

For: VIZ-4544
Signed-off-by: Tvrtko Ursulin <tvrtko.ursulin@intel.com>
Cc: Daniel Vetter <daniel.vetter@ffwll.ch>
Reviewed-by: Michel Thierry <michel.thierry@intel.com>
[danvet: Drop hunk from i915_gem_shrink since it's just prettification but
upsets a __must_check warning.]
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
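As a minimal sketch of the calling convention this introduces (not code from
the patch; pin_example is a hypothetical caller, the alignment and flags are
arbitrary):

    /* Pin the default view; after this patch the old i915_gem_object_pin()
     * wrapper expands to exactly this call. */
    static int pin_example(struct drm_i915_gem_object *obj,
                           struct i915_address_space *vm)
    {
        return i915_gem_object_pin_view(obj, vm, 4096, PIN_GLOBAL,
                                        &i915_ggtt_view_normal);
    }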
Diffstat (limited to 'drivers')
-rw-r--r--  drivers/gpu/drm/i915/i915_debugfs.c          5
-rw-r--r--  drivers/gpu/drm/i915/i915_drv.h             56
-rw-r--r--  drivers/gpu/drm/i915/i915_gem.c            109
-rw-r--r--  drivers/gpu/drm/i915/i915_gem_context.c     11
-rw-r--r--  drivers/gpu/drm/i915/i915_gem_execbuffer.c   9
-rw-r--r--  drivers/gpu/drm/i915/i915_gem_gtt.c         70
-rw-r--r--  drivers/gpu/drm/i915/i915_gem_gtt.h         22
-rw-r--r--  drivers/gpu/drm/i915/i915_gpu_error.c        8
8 files changed, 220 insertions, 70 deletions
diff --git a/drivers/gpu/drm/i915/i915_debugfs.c b/drivers/gpu/drm/i915/i915_debugfs.c
index 479e0c119111..8d2988ae3c46 100644
--- a/drivers/gpu/drm/i915/i915_debugfs.c
+++ b/drivers/gpu/drm/i915/i915_debugfs.c
@@ -152,8 +152,9 @@ describe_obj(struct seq_file *m, struct drm_i915_gem_object *obj)
             seq_puts(m, " (pp");
         else
             seq_puts(m, " (g");
-        seq_printf(m, "gtt offset: %08lx, size: %08lx)",
-               vma->node.start, vma->node.size);
+        seq_printf(m, "gtt offset: %08lx, size: %08lx, type: %u)",
+               vma->node.start, vma->node.size,
+               vma->ggtt_view.type);
     }
     if (obj->stolen)
         seq_printf(m, " (stolen: %08lx)", obj->stolen->start);
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index 5f4cca3ef01c..eb47990f827c 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -2530,10 +2530,23 @@ void i915_gem_vma_destroy(struct i915_vma *vma);
 #define PIN_GLOBAL 0x4
 #define PIN_OFFSET_BIAS 0x8
 #define PIN_OFFSET_MASK (~4095)
+int __must_check i915_gem_object_pin_view(struct drm_i915_gem_object *obj,
+                      struct i915_address_space *vm,
+                      uint32_t alignment,
+                      uint64_t flags,
+                      const struct i915_ggtt_view *view);
+static inline
 int __must_check i915_gem_object_pin(struct drm_i915_gem_object *obj,
                      struct i915_address_space *vm,
                      uint32_t alignment,
-                     uint64_t flags);
+                     uint64_t flags)
+{
+    return i915_gem_object_pin_view(obj, vm, alignment, flags,
+                    &i915_ggtt_view_normal);
+}
+
+int i915_vma_bind(struct i915_vma *vma, enum i915_cache_level cache_level,
+          u32 flags);
 int __must_check i915_vma_unbind(struct i915_vma *vma);
 int i915_gem_object_put_pages(struct drm_i915_gem_object *obj);
 void i915_gem_release_all_mmaps(struct drm_i915_private *dev_priv);
@@ -2695,18 +2708,51 @@ struct dma_buf *i915_gem_prime_export(struct drm_device *dev,
 
 void i915_gem_restore_fences(struct drm_device *dev);
 
+unsigned long i915_gem_obj_offset_view(struct drm_i915_gem_object *o,
+                       struct i915_address_space *vm,
+                       enum i915_ggtt_view_type view);
+static inline
 unsigned long i915_gem_obj_offset(struct drm_i915_gem_object *o,
-                  struct i915_address_space *vm);
+                  struct i915_address_space *vm)
+{
+    return i915_gem_obj_offset_view(o, vm, I915_GGTT_VIEW_NORMAL);
+}
 bool i915_gem_obj_bound_any(struct drm_i915_gem_object *o);
+bool i915_gem_obj_bound_view(struct drm_i915_gem_object *o,
+                 struct i915_address_space *vm,
+                 enum i915_ggtt_view_type view);
+static inline
 bool i915_gem_obj_bound(struct drm_i915_gem_object *o,
-            struct i915_address_space *vm);
+            struct i915_address_space *vm)
+{
+    return i915_gem_obj_bound_view(o, vm, I915_GGTT_VIEW_NORMAL);
+}
+
 unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o,
                 struct i915_address_space *vm);
+struct i915_vma *i915_gem_obj_to_vma_view(struct drm_i915_gem_object *obj,
+                      struct i915_address_space *vm,
+                      const struct i915_ggtt_view *view);
+static inline
 struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
-                     struct i915_address_space *vm);
+                     struct i915_address_space *vm)
+{
+    return i915_gem_obj_to_vma_view(obj, vm, &i915_ggtt_view_normal);
+}
+
+struct i915_vma *
+i915_gem_obj_lookup_or_create_vma_view(struct drm_i915_gem_object *obj,
+                       struct i915_address_space *vm,
+                       const struct i915_ggtt_view *view);
+
+static inline
 struct i915_vma *
 i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj,
-                  struct i915_address_space *vm);
+                  struct i915_address_space *vm)
+{
+    return i915_gem_obj_lookup_or_create_vma_view(obj, vm,
+                              &i915_ggtt_view_normal);
+}
 
 struct i915_vma *i915_gem_obj_to_ggtt(struct drm_i915_gem_object *obj);
 static inline bool i915_gem_obj_is_pinned(struct drm_i915_gem_object *obj) {
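The header changes above all follow one pattern: each lookup helper gains a
_view variant and the old name becomes an inline wrapper supplying the normal
view, so existing callers compile unchanged. A usage sketch (obj and vm are
assumed to exist; not code from the patch):

    unsigned long offset;

    /* These two forms are equivalent after this patch: */
    if (i915_gem_obj_bound(obj, vm))
        offset = i915_gem_obj_offset(obj, vm);

    if (i915_gem_obj_bound_view(obj, vm, I915_GGTT_VIEW_NORMAL))
        offset = i915_gem_obj_offset_view(obj, vm, I915_GGTT_VIEW_NORMAL);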
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index e3ce4bef22a3..c26d4ccd183a 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -2297,17 +2297,14 @@ void i915_vma_move_to_active(struct i915_vma *vma,
 static void
 i915_gem_object_move_to_inactive(struct drm_i915_gem_object *obj)
 {
-    struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
-    struct i915_address_space *vm;
     struct i915_vma *vma;
 
     BUG_ON(obj->base.write_domain & ~I915_GEM_GPU_DOMAINS);
     BUG_ON(!obj->active);
 
-    list_for_each_entry(vm, &dev_priv->vm_list, global_link) {
-        vma = i915_gem_obj_to_vma(obj, vm);
-        if (vma && !list_empty(&vma->mm_list))
-            list_move_tail(&vma->mm_list, &vm->inactive_list);
+    list_for_each_entry(vma, &obj->vma_list, vma_link) {
+        if (!list_empty(&vma->mm_list))
+            list_move_tail(&vma->mm_list, &vma->vm->inactive_list);
     }
 
     intel_fb_obj_flush(obj, true);
@@ -3062,10 +3059,8 @@ int i915_vma_unbind(struct i915_vma *vma)
      * cause memory corruption through use-after-free.
      */
 
-    /* Throw away the active reference before moving to the unbound list */
-    i915_gem_object_retire(obj);
-
-    if (i915_is_ggtt(vma->vm)) {
+    if (i915_is_ggtt(vma->vm) &&
+        vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
         i915_gem_object_finish_gtt(obj);
 
         /* release the fence reg _after_ flushing */
@@ -3079,8 +3074,15 @@ int i915_vma_unbind(struct i915_vma *vma)
     vma->unbind_vma(vma);
 
     list_del_init(&vma->mm_list);
-    if (i915_is_ggtt(vma->vm))
-        obj->map_and_fenceable = false;
+    if (i915_is_ggtt(vma->vm)) {
+        if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
+            obj->map_and_fenceable = false;
+        } else if (vma->ggtt_view.pages) {
+            sg_free_table(vma->ggtt_view.pages);
+            kfree(vma->ggtt_view.pages);
+            vma->ggtt_view.pages = NULL;
+        }
+    }
 
     drm_mm_remove_node(&vma->node);
     i915_gem_vma_destroy(vma);
@@ -3088,6 +3090,10 @@ int i915_vma_unbind(struct i915_vma *vma)
     /* Since the unbound list is global, only move to that list if
      * no more VMAs exist. */
     if (list_empty(&obj->vma_list)) {
+        /* Throw away the active reference before
+         * moving to the unbound list. */
+        i915_gem_object_retire(obj);
+
         i915_gem_gtt_finish_object(obj);
         list_move_tail(&obj->global_list, &dev_priv->mm.unbound_list);
     }
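The unbind hunks above encode the ownership rule from the commit message; a
commentary sketch (not patch code) of who frees what and when:

    /*
     * vma->ggtt_view.pages
     *   - normal view: borrows obj->pages, so it is never freed here;
     *   - alternative views: a private sg_table, freed by i915_vma_unbind()
     *     when that VMA goes away (kept for the VMA's whole lifetime, per v4).
     *
     * The DMA mapping behind obj->pages is torn down by
     * i915_gem_gtt_finish_object() only once obj->vma_list is empty, i.e.
     * when the last VMA of any view is unbound; the object is retired at
     * the same point (v3).
     */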
@@ -3498,7 +3504,8 @@ static struct i915_vma *
 i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
                struct i915_address_space *vm,
                unsigned alignment,
-               uint64_t flags)
+               uint64_t flags,
+               const struct i915_ggtt_view *view)
 {
     struct drm_device *dev = obj->base.dev;
     struct drm_i915_private *dev_priv = dev->dev_private;
@@ -3548,7 +3555,7 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 
     i915_gem_object_pin_pages(obj);
 
-    vma = i915_gem_obj_lookup_or_create_vma(obj, vm);
+    vma = i915_gem_obj_lookup_or_create_vma_view(obj, vm, view);
     if (IS_ERR(vma))
         goto err_unpin;
 
@@ -3578,15 +3585,19 @@ search_free:
     if (ret)
         goto err_remove_node;
 
+    trace_i915_vma_bind(vma, flags);
+    ret = i915_vma_bind(vma, obj->cache_level,
+                flags & PIN_GLOBAL ? GLOBAL_BIND : 0);
+    if (ret)
+        goto err_finish_gtt;
+
     list_move_tail(&obj->global_list, &dev_priv->mm.bound_list);
     list_add_tail(&vma->mm_list, &vm->inactive_list);
 
-    trace_i915_vma_bind(vma, flags);
-    vma->bind_vma(vma, obj->cache_level,
-              flags & PIN_GLOBAL ? GLOBAL_BIND : 0);
-
     return vma;
 
+err_finish_gtt:
+    i915_gem_gtt_finish_object(obj);
 err_remove_node:
     drm_mm_remove_node(&vma->node);
 err_free_vma:
@@ -3789,9 +3800,12 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
     }
 
         list_for_each_entry(vma, &obj->vma_list, vma_link)
-            if (drm_mm_node_allocated(&vma->node))
-                vma->bind_vma(vma, cache_level,
-                          vma->bound & GLOBAL_BIND);
+            if (drm_mm_node_allocated(&vma->node)) {
+                ret = i915_vma_bind(vma, cache_level,
+                            vma->bound & GLOBAL_BIND);
+                if (ret)
+                    return ret;
+            }
     }
 
     list_for_each_entry(vma, &obj->vma_list, vma_link)
@@ -4144,10 +4158,11 @@ i915_vma_misplaced(struct i915_vma *vma, uint32_t alignment, uint64_t flags)
 }
 
 int
-i915_gem_object_pin(struct drm_i915_gem_object *obj,
-            struct i915_address_space *vm,
-            uint32_t alignment,
-            uint64_t flags)
+i915_gem_object_pin_view(struct drm_i915_gem_object *obj,
+             struct i915_address_space *vm,
+             uint32_t alignment,
+             uint64_t flags,
+             const struct i915_ggtt_view *view)
 {
     struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
     struct i915_vma *vma;
@@ -4163,7 +4178,7 @@ i915_gem_object_pin(struct drm_i915_gem_object *obj,
     if (WARN_ON((flags & (PIN_MAPPABLE | PIN_GLOBAL)) == PIN_MAPPABLE))
         return -EINVAL;
 
-    vma = i915_gem_obj_to_vma(obj, vm);
+    vma = i915_gem_obj_to_vma_view(obj, vm, view);
     if (vma) {
         if (WARN_ON(vma->pin_count == DRM_I915_GEM_OBJECT_MAX_PIN_COUNT))
             return -EBUSY;
@@ -4173,7 +4188,8 @@ i915_gem_object_pin(struct drm_i915_gem_object *obj,
               "bo is already pinned with incorrect alignment:"
               " offset=%lx, req.alignment=%x, req.map_and_fenceable=%d,"
               " obj->map_and_fenceable=%d\n",
-              i915_gem_obj_offset(obj, vm), alignment,
+              i915_gem_obj_offset_view(obj, vm, view->type),
+              alignment,
               !!(flags & PIN_MAPPABLE),
               obj->map_and_fenceable);
             ret = i915_vma_unbind(vma);
@@ -4186,13 +4202,17 @@ i915_gem_object_pin(struct drm_i915_gem_object *obj,
 
     bound = vma ? vma->bound : 0;
     if (vma == NULL || !drm_mm_node_allocated(&vma->node)) {
-        vma = i915_gem_object_bind_to_vm(obj, vm, alignment, flags);
+        vma = i915_gem_object_bind_to_vm(obj, vm, alignment,
+                         flags, view);
         if (IS_ERR(vma))
             return PTR_ERR(vma);
     }
 
-    if (flags & PIN_GLOBAL && !(vma->bound & GLOBAL_BIND))
-        vma->bind_vma(vma, obj->cache_level, GLOBAL_BIND);
+    if (flags & PIN_GLOBAL && !(vma->bound & GLOBAL_BIND)) {
+        ret = i915_vma_bind(vma, obj->cache_level, GLOBAL_BIND);
+        if (ret)
+            return ret;
+    }
 
     if ((bound ^ vma->bound) & GLOBAL_BIND) {
         bool mappable, fenceable;
@@ -4528,12 +4548,13 @@ void i915_gem_free_object(struct drm_gem_object *gem_obj)
     intel_runtime_pm_put(dev_priv);
 }
 
-struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
-                     struct i915_address_space *vm)
+struct i915_vma *i915_gem_obj_to_vma_view(struct drm_i915_gem_object *obj,
+                      struct i915_address_space *vm,
+                      const struct i915_ggtt_view *view)
 {
     struct i915_vma *vma;
     list_for_each_entry(vma, &obj->vma_list, vma_link)
-        if (vma->vm == vm)
+        if (vma->vm == vm && vma->ggtt_view.type == view->type)
             return vma;
 
     return NULL;
@@ -5145,8 +5166,9 @@ i915_gem_shrinker_count(struct shrinker *shrinker, struct shrink_control *sc)
 }
 
 /* All the new VM stuff */
-unsigned long i915_gem_obj_offset(struct drm_i915_gem_object *o,
-                  struct i915_address_space *vm)
+unsigned long i915_gem_obj_offset_view(struct drm_i915_gem_object *o,
+                       struct i915_address_space *vm,
+                       enum i915_ggtt_view_type view)
 {
     struct drm_i915_private *dev_priv = o->base.dev->dev_private;
     struct i915_vma *vma;
@@ -5154,7 +5176,7 @@ unsigned long i915_gem_obj_offset(struct drm_i915_gem_object *o,
     WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base);
 
     list_for_each_entry(vma, &o->vma_list, vma_link) {
-        if (vma->vm == vm)
+        if (vma->vm == vm && vma->ggtt_view.type == view)
             return vma->node.start;
 
     }
@@ -5163,13 +5185,16 @@ unsigned long i915_gem_obj_offset(struct drm_i915_gem_object *o,
     return -1;
 }
 
-bool i915_gem_obj_bound(struct drm_i915_gem_object *o,
-            struct i915_address_space *vm)
+bool i915_gem_obj_bound_view(struct drm_i915_gem_object *o,
+                 struct i915_address_space *vm,
+                 enum i915_ggtt_view_type view)
 {
     struct i915_vma *vma;
 
     list_for_each_entry(vma, &o->vma_list, vma_link)
-        if (vma->vm == vm && drm_mm_node_allocated(&vma->node))
+        if (vma->vm == vm &&
+            vma->ggtt_view.type == view &&
+            drm_mm_node_allocated(&vma->node))
             return true;
 
     return false;
@@ -5304,10 +5329,10 @@ struct i915_vma *i915_gem_obj_to_ggtt(struct drm_i915_gem_object *obj)
     struct i915_address_space *ggtt = i915_obj_to_ggtt(obj);
     struct i915_vma *vma;
 
-    list_for_each_entry(vma, &obj->vma_list, vma_link) {
-        if (vma->vm == ggtt)
+    list_for_each_entry(vma, &obj->vma_list, vma_link)
+        if (vma->vm == ggtt &&
+            vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL)
             return vma;
-    }
 
     return NULL;
 }
diff --git a/drivers/gpu/drm/i915/i915_gem_context.c b/drivers/gpu/drm/i915/i915_gem_context.c
index 2acf5803cf32..b9deade53f2e 100644
--- a/drivers/gpu/drm/i915/i915_gem_context.c
+++ b/drivers/gpu/drm/i915/i915_gem_context.c
@@ -590,9 +590,14 @@ static int do_switch(struct intel_engine_cs *ring,
         goto unpin_out;
 
     vma = i915_gem_obj_to_ggtt(to->legacy_hw_ctx.rcs_state);
-    if (!(vma->bound & GLOBAL_BIND))
-        vma->bind_vma(vma, to->legacy_hw_ctx.rcs_state->cache_level,
-                GLOBAL_BIND);
+    if (!(vma->bound & GLOBAL_BIND)) {
+        ret = i915_vma_bind(vma,
+                    to->legacy_hw_ctx.rcs_state->cache_level,
+                    GLOBAL_BIND);
+        /* This shouldn't ever fail. */
+        if (WARN_ONCE(ret, "GGTT context bind failed!"))
+            goto unpin_out;
+    }
 
     if (!to->legacy_hw_ctx.initialized || i915_gem_context_is_default(to))
         hw_flags |= MI_RESTORE_INHIBIT;
diff --git a/drivers/gpu/drm/i915/i915_gem_execbuffer.c b/drivers/gpu/drm/i915/i915_gem_execbuffer.c
index 0c25f6202ca4..3927d931ad73 100644
--- a/drivers/gpu/drm/i915/i915_gem_execbuffer.c
+++ b/drivers/gpu/drm/i915/i915_gem_execbuffer.c
@@ -360,9 +360,12 @@ i915_gem_execbuffer_relocate_entry(struct drm_i915_gem_object *obj,
      * through the ppgtt for non_secure batchbuffers. */
     if (unlikely(IS_GEN6(dev) &&
         reloc->write_domain == I915_GEM_DOMAIN_INSTRUCTION &&
-        !(target_vma->bound & GLOBAL_BIND)))
-        target_vma->bind_vma(target_vma, target_i915_obj->cache_level,
-                GLOBAL_BIND);
+        !(target_vma->bound & GLOBAL_BIND))) {
+        ret = i915_vma_bind(target_vma, target_i915_obj->cache_level,
+                    GLOBAL_BIND);
+        if (WARN_ONCE(ret, "Unexpected failure to bind target VMA!"))
+            return ret;
+    }
 
     /* Validate that the target is in a valid r/w GPU domain */
     if (unlikely(reloc->write_domain & (reloc->write_domain - 1))) {
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c
index ce4e46c443a1..9821a6095e53 100644
--- a/drivers/gpu/drm/i915/i915_gem_gtt.c
+++ b/drivers/gpu/drm/i915/i915_gem_gtt.c
@@ -30,6 +30,8 @@
 #include "i915_trace.h"
 #include "intel_drv.h"
 
+const struct i915_ggtt_view i915_ggtt_view_normal;
+
 static void bdw_setup_private_ppat(struct drm_i915_private *dev_priv);
 static void chv_setup_private_ppat(struct drm_i915_private *dev_priv);
 
@@ -1341,9 +1343,12 @@ void i915_gem_restore_gtt_mappings(struct drm_device *dev)
         /* The bind_vma code tries to be smart about tracking mappings.
          * Unfortunately above, we've just wiped out the mappings
          * without telling our object about it. So we need to fake it.
+         *
+         * Bind is not expected to fail since this is only called on
+         * resume and assumption is all requirements exist already.
          */
         vma->bound &= ~GLOBAL_BIND;
-        vma->bind_vma(vma, obj->cache_level, GLOBAL_BIND);
+        WARN_ON(i915_vma_bind(vma, obj->cache_level, GLOBAL_BIND));
     }
 
 
@@ -1538,7 +1543,7 @@ static void i915_ggtt_bind_vma(struct i915_vma *vma,
         AGP_USER_MEMORY : AGP_USER_CACHED_MEMORY;
 
     BUG_ON(!i915_is_ggtt(vma->vm));
-    intel_gtt_insert_sg_entries(vma->obj->pages, entry, flags);
+    intel_gtt_insert_sg_entries(vma->ggtt_view.pages, entry, flags);
     vma->bound = GLOBAL_BIND;
 }
 
@@ -1588,7 +1593,7 @@ static void ggtt_bind_vma(struct i915_vma *vma,
     if (!dev_priv->mm.aliasing_ppgtt || flags & GLOBAL_BIND) {
         if (!(vma->bound & GLOBAL_BIND) ||
             (cache_level != obj->cache_level)) {
-            vma->vm->insert_entries(vma->vm, obj->pages,
+            vma->vm->insert_entries(vma->vm, vma->ggtt_view.pages,
                         vma->node.start,
                         cache_level, flags);
             vma->bound |= GLOBAL_BIND;
@@ -1600,7 +1605,7 @@ static void ggtt_bind_vma(struct i915_vma *vma,
             (cache_level != obj->cache_level))) {
             struct i915_hw_ppgtt *appgtt = dev_priv->mm.aliasing_ppgtt;
             appgtt->base.insert_entries(&appgtt->base,
-                            vma->obj->pages,
+                            vma->ggtt_view.pages,
                             vma->node.start,
                             cache_level, flags);
             vma->bound |= LOCAL_BIND;
@@ -2165,7 +2170,8 @@ int i915_gem_gtt_init(struct drm_device *dev)
 }
 
 static struct i915_vma *__i915_gem_vma_create(struct drm_i915_gem_object *obj,
-                          struct i915_address_space *vm)
+                          struct i915_address_space *vm,
+                          const struct i915_ggtt_view *view)
 {
     struct i915_vma *vma = kzalloc(sizeof(*vma), GFP_KERNEL);
     if (vma == NULL)
@@ -2176,6 +2182,7 @@ static struct i915_vma *__i915_gem_vma_create(struct drm_i915_gem_object *obj,
     INIT_LIST_HEAD(&vma->exec_list);
     vma->vm = vm;
     vma->obj = obj;
+    vma->ggtt_view = *view;
 
     switch (INTEL_INFO(vm->dev)->gen) {
     case 9:
@@ -2210,14 +2217,59 @@ static struct i915_vma *__i915_gem_vma_create(struct drm_i915_gem_object *obj,
 }
 
 struct i915_vma *
-i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj,
-                  struct i915_address_space *vm)
+i915_gem_obj_lookup_or_create_vma_view(struct drm_i915_gem_object *obj,
+                       struct i915_address_space *vm,
+                       const struct i915_ggtt_view *view)
 {
     struct i915_vma *vma;
 
-    vma = i915_gem_obj_to_vma(obj, vm);
+    vma = i915_gem_obj_to_vma_view(obj, vm, view);
     if (!vma)
-        vma = __i915_gem_vma_create(obj, vm);
+        vma = __i915_gem_vma_create(obj, vm, view);
 
     return vma;
 }
+
+static inline
+int i915_get_vma_pages(struct i915_vma *vma)
+{
+    if (vma->ggtt_view.pages)
+        return 0;
+
+    if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL)
+        vma->ggtt_view.pages = vma->obj->pages;
+    else
+        WARN_ONCE(1, "GGTT view %u not implemented!\n",
+              vma->ggtt_view.type);
+
+    if (!vma->ggtt_view.pages) {
+        DRM_ERROR("Failed to get pages for VMA view type %u!\n",
+              vma->ggtt_view.type);
+        return -EINVAL;
+    }
+
+    return 0;
+}
+
+/**
+ * i915_vma_bind - Sets up PTEs for a VMA in its corresponding address space.
+ * @vma: VMA to map
+ * @cache_level: mapping cache level
+ * @flags: flags like global or local mapping
+ *
+ * DMA addresses are taken from the scatter-gather table of this object (or of
+ * this VMA in case of non-default GGTT views) and PTE entries set up.
+ * Note that DMA addresses are also the only part of the SG table we care about.
+ */
+int i915_vma_bind(struct i915_vma *vma, enum i915_cache_level cache_level,
+          u32 flags)
+{
+    int ret = i915_get_vma_pages(vma);
+
+    if (ret)
+        return ret;
+
+    vma->bind_vma(vma, cache_level, flags);
+
+    return 0;
+}
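i915_get_vma_pages() above is the single extension point where a future view
type would construct its page layout. A hypothetical sketch of such an
extension (I915_GGTT_VIEW_ROTATED and intel_rotate_pages() are invented names,
not part of this patch):

    if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL)
        vma->ggtt_view.pages = vma->obj->pages;
    else if (vma->ggtt_view.type == I915_GGTT_VIEW_ROTATED)
        /* hypothetical: build a view-private sg_table from obj->pages */
        vma->ggtt_view.pages = intel_rotate_pages(vma->obj);
    else
        WARN_ONCE(1, "GGTT view %u not implemented!\n",
                  vma->ggtt_view.type);

Because i915_vma_unbind() already frees any non-normal ggtt_view.pages, no
extra teardown would be needed for such a view.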
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.h b/drivers/gpu/drm/i915/i915_gem_gtt.h
index dd849df6a268..e377c7d27bd4 100644
--- a/drivers/gpu/drm/i915/i915_gem_gtt.h
+++ b/drivers/gpu/drm/i915/i915_gem_gtt.h
@@ -109,7 +109,20 @@ typedef gen8_gtt_pte_t gen8_ppgtt_pde_t;
 #define GEN8_PPAT_ELLC_OVERRIDE	(0<<2)
 #define GEN8_PPAT(i, x)		((uint64_t) (x) << ((i) * 8))
 
+enum i915_ggtt_view_type {
+    I915_GGTT_VIEW_NORMAL = 0,
+};
+
+struct i915_ggtt_view {
+    enum i915_ggtt_view_type type;
+
+    struct sg_table *pages;
+};
+
+extern const struct i915_ggtt_view i915_ggtt_view_normal;
+
 enum i915_cache_level;
+
 /**
  * A VMA represents a GEM BO that is bound into an address space. Therefore, a
  * VMA's presence cannot be guaranteed before binding, or after unbinding the
@@ -129,6 +142,15 @@ struct i915_vma {
 #define PTE_READ_ONLY	(1<<2)
     unsigned int bound : 4;
 
+    /**
+     * Support different GGTT views into the same object.
+     * This means there can be multiple VMA mappings per object and per VM.
+     * i915_ggtt_view_type is used to distinguish between those entries.
+     * The default of zero (I915_GGTT_VIEW_NORMAL) is assumed in GEM
+     * functions which take no ggtt view parameter.
+     */
+    struct i915_ggtt_view ggtt_view;
+
     /** This object's place on the active/inactive lists */
     struct list_head mm_list;
 
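Adding a real alternative view later would start from this enum; a
hypothetical sketch (the ROTATED name is illustrative only, not part of this
patch):

    enum i915_ggtt_view_type {
        I915_GGTT_VIEW_NORMAL = 0,
        I915_GGTT_VIEW_ROTATED,    /* hypothetical future view */
    };

together with a matching pages-construction case in i915_get_vma_pages().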
diff --git a/drivers/gpu/drm/i915/i915_gpu_error.c b/drivers/gpu/drm/i915/i915_gpu_error.c
index c4536e185b75..f97479a2ea6f 100644
--- a/drivers/gpu/drm/i915/i915_gpu_error.c
+++ b/drivers/gpu/drm/i915/i915_gpu_error.c
@@ -718,10 +718,8 @@ static u32 capture_pinned_bo(struct drm_i915_error_buffer *err,
             break;
 
         list_for_each_entry(vma, &obj->vma_list, vma_link)
-            if (vma->vm == vm && vma->pin_count > 0) {
+            if (vma->vm == vm && vma->pin_count > 0)
                 capture_bo(err++, vma);
-                break;
-            }
     }
 
     return err - first;
@@ -1096,10 +1094,8 @@ static void i915_gem_capture_vm(struct drm_i915_private *dev_priv,
 
     list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
         list_for_each_entry(vma, &obj->vma_list, vma_link)
-            if (vma->vm == vm && vma->pin_count > 0) {
+            if (vma->vm == vm && vma->pin_count > 0)
                 i++;
-                break;
-            }
     }
     error->pinned_bo_count[ndx] = i - error->active_bo_count[ndx];
 