diff options
author | Tvrtko Ursulin <tvrtko.ursulin@intel.com> | 2014-12-10 12:27:58 -0500 |
---|---|---|
committer | Daniel Vetter <daniel.vetter@ffwll.ch> | 2014-12-15 05:25:04 -0500 |
commit | fe14d5f4e5468c5b80a24f1a64abcbe116143670 (patch) | |
tree | 0b685fdf444fe53cfb010ca30e439fcb39a4e29d /drivers/gpu/drm/i915/i915_gem_gtt.c | |
parent | db5ff4ac97f6602360645414b698a05f91b40542 (diff) |
drm/i915: Infrastructure for supporting different GGTT views per object
Things like reliable GGTT mappings and mirrored 2d-on-3d display will need
to map objects into the same address space multiple times.
Added a GGTT view concept and linked it with the VMA to distinguish between
multiple instances per address space.
New objects and GEM functions which do not take this new view as a parameter
assume the default of zero (I915_GGTT_VIEW_NORMAL) which preserves the
previous behaviour.
This now means that objects can have multiple VMA entries, so the code which
assumed there would be only one also had to be modified.
Alternative GGTT views are supposed to borrow DMA addresses from obj->pages
which is DMA mapped on first VMA instantiation and unmapped on the last one
going away.
v2:
* Removed per view special casing in i915_gem_ggtt_prepare /
finish_object in favour of creating and destroying DMA mappings
on first VMA instantiation and last VMA destruction. (Daniel Vetter)
* Simplified i915_vma_unbind which does not need to count the GGTT views.
(Daniel Vetter)
* Also moved obj->map_and_fenceable reset under the same check.
* Checkpatch cleanups.
v3:
* Only retire objects once the last VMA is unbound.
v4:
* Keep scatter-gather table for alternative views persistent for the
lifetime of the VMA.
* Propagate binding errors to callers and handle appropriately.
v5:
* Explicitly look for normal GGTT view in i915_gem_obj_bound to align
usage in i915_gem_object_ggtt_unpin. (Michel Thierry)
* Change to single if statement in i915_gem_obj_to_ggtt. (Michel Thierry)
* Removed stray semi-colon in i915_gem_object_set_cache_level.
For: VIZ-4544
Signed-off-by: Tvrtko Ursulin <tvrtko.ursulin@intel.com>
Cc: Daniel Vetter <daniel.vetter@ffwll.ch>
Reviewed-by: Michel Thierry <michel.thierry@intel.com>
[danvet: Drop hunk from i915_gem_shrink since it's just prettification
but upsets a __must_check warning.]
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
Diffstat (limited to 'drivers/gpu/drm/i915/i915_gem_gtt.c')
-rw-r--r-- | drivers/gpu/drm/i915/i915_gem_gtt.c | 70 |
1 files changed, 61 insertions, 9 deletions
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c index ce4e46c443a1..9821a6095e53 100644 --- a/drivers/gpu/drm/i915/i915_gem_gtt.c +++ b/drivers/gpu/drm/i915/i915_gem_gtt.c | |||
@@ -30,6 +30,8 @@ | |||
30 | #include "i915_trace.h" | 30 | #include "i915_trace.h" |
31 | #include "intel_drv.h" | 31 | #include "intel_drv.h" |
32 | 32 | ||
33 | const struct i915_ggtt_view i915_ggtt_view_normal; | ||
34 | |||
33 | static void bdw_setup_private_ppat(struct drm_i915_private *dev_priv); | 35 | static void bdw_setup_private_ppat(struct drm_i915_private *dev_priv); |
34 | static void chv_setup_private_ppat(struct drm_i915_private *dev_priv); | 36 | static void chv_setup_private_ppat(struct drm_i915_private *dev_priv); |
35 | 37 | ||
@@ -1341,9 +1343,12 @@ void i915_gem_restore_gtt_mappings(struct drm_device *dev) | |||
1341 | /* The bind_vma code tries to be smart about tracking mappings. | 1343 | /* The bind_vma code tries to be smart about tracking mappings. |
1342 | * Unfortunately above, we've just wiped out the mappings | 1344 | * Unfortunately above, we've just wiped out the mappings |
1343 | * without telling our object about it. So we need to fake it. | 1345 | * without telling our object about it. So we need to fake it. |
1346 | * | ||
1347 | * Bind is not expected to fail since this is only called on | ||
1348 | * resume and assumption is all requirements exist already. | ||
1344 | */ | 1349 | */ |
1345 | vma->bound &= ~GLOBAL_BIND; | 1350 | vma->bound &= ~GLOBAL_BIND; |
1346 | vma->bind_vma(vma, obj->cache_level, GLOBAL_BIND); | 1351 | WARN_ON(i915_vma_bind(vma, obj->cache_level, GLOBAL_BIND)); |
1347 | } | 1352 | } |
1348 | 1353 | ||
1349 | 1354 | ||
@@ -1538,7 +1543,7 @@ static void i915_ggtt_bind_vma(struct i915_vma *vma, | |||
1538 | AGP_USER_MEMORY : AGP_USER_CACHED_MEMORY; | 1543 | AGP_USER_MEMORY : AGP_USER_CACHED_MEMORY; |
1539 | 1544 | ||
1540 | BUG_ON(!i915_is_ggtt(vma->vm)); | 1545 | BUG_ON(!i915_is_ggtt(vma->vm)); |
1541 | intel_gtt_insert_sg_entries(vma->obj->pages, entry, flags); | 1546 | intel_gtt_insert_sg_entries(vma->ggtt_view.pages, entry, flags); |
1542 | vma->bound = GLOBAL_BIND; | 1547 | vma->bound = GLOBAL_BIND; |
1543 | } | 1548 | } |
1544 | 1549 | ||
@@ -1588,7 +1593,7 @@ static void ggtt_bind_vma(struct i915_vma *vma, | |||
1588 | if (!dev_priv->mm.aliasing_ppgtt || flags & GLOBAL_BIND) { | 1593 | if (!dev_priv->mm.aliasing_ppgtt || flags & GLOBAL_BIND) { |
1589 | if (!(vma->bound & GLOBAL_BIND) || | 1594 | if (!(vma->bound & GLOBAL_BIND) || |
1590 | (cache_level != obj->cache_level)) { | 1595 | (cache_level != obj->cache_level)) { |
1591 | vma->vm->insert_entries(vma->vm, obj->pages, | 1596 | vma->vm->insert_entries(vma->vm, vma->ggtt_view.pages, |
1592 | vma->node.start, | 1597 | vma->node.start, |
1593 | cache_level, flags); | 1598 | cache_level, flags); |
1594 | vma->bound |= GLOBAL_BIND; | 1599 | vma->bound |= GLOBAL_BIND; |
@@ -1600,7 +1605,7 @@ static void ggtt_bind_vma(struct i915_vma *vma, | |||
1600 | (cache_level != obj->cache_level))) { | 1605 | (cache_level != obj->cache_level))) { |
1601 | struct i915_hw_ppgtt *appgtt = dev_priv->mm.aliasing_ppgtt; | 1606 | struct i915_hw_ppgtt *appgtt = dev_priv->mm.aliasing_ppgtt; |
1602 | appgtt->base.insert_entries(&appgtt->base, | 1607 | appgtt->base.insert_entries(&appgtt->base, |
1603 | vma->obj->pages, | 1608 | vma->ggtt_view.pages, |
1604 | vma->node.start, | 1609 | vma->node.start, |
1605 | cache_level, flags); | 1610 | cache_level, flags); |
1606 | vma->bound |= LOCAL_BIND; | 1611 | vma->bound |= LOCAL_BIND; |
@@ -2165,7 +2170,8 @@ int i915_gem_gtt_init(struct drm_device *dev) | |||
2165 | } | 2170 | } |
2166 | 2171 | ||
2167 | static struct i915_vma *__i915_gem_vma_create(struct drm_i915_gem_object *obj, | 2172 | static struct i915_vma *__i915_gem_vma_create(struct drm_i915_gem_object *obj, |
2168 | struct i915_address_space *vm) | 2173 | struct i915_address_space *vm, |
2174 | const struct i915_ggtt_view *view) | ||
2169 | { | 2175 | { |
2170 | struct i915_vma *vma = kzalloc(sizeof(*vma), GFP_KERNEL); | 2176 | struct i915_vma *vma = kzalloc(sizeof(*vma), GFP_KERNEL); |
2171 | if (vma == NULL) | 2177 | if (vma == NULL) |
@@ -2176,6 +2182,7 @@ static struct i915_vma *__i915_gem_vma_create(struct drm_i915_gem_object *obj, | |||
2176 | INIT_LIST_HEAD(&vma->exec_list); | 2182 | INIT_LIST_HEAD(&vma->exec_list); |
2177 | vma->vm = vm; | 2183 | vma->vm = vm; |
2178 | vma->obj = obj; | 2184 | vma->obj = obj; |
2185 | vma->ggtt_view = *view; | ||
2179 | 2186 | ||
2180 | switch (INTEL_INFO(vm->dev)->gen) { | 2187 | switch (INTEL_INFO(vm->dev)->gen) { |
2181 | case 9: | 2188 | case 9: |
@@ -2210,14 +2217,59 @@ static struct i915_vma *__i915_gem_vma_create(struct drm_i915_gem_object *obj, | |||
2210 | } | 2217 | } |
2211 | 2218 | ||
2212 | struct i915_vma * | 2219 | struct i915_vma * |
2213 | i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj, | 2220 | i915_gem_obj_lookup_or_create_vma_view(struct drm_i915_gem_object *obj, |
2214 | struct i915_address_space *vm) | 2221 | struct i915_address_space *vm, |
2222 | const struct i915_ggtt_view *view) | ||
2215 | { | 2223 | { |
2216 | struct i915_vma *vma; | 2224 | struct i915_vma *vma; |
2217 | 2225 | ||
2218 | vma = i915_gem_obj_to_vma(obj, vm); | 2226 | vma = i915_gem_obj_to_vma_view(obj, vm, view); |
2219 | if (!vma) | 2227 | if (!vma) |
2220 | vma = __i915_gem_vma_create(obj, vm); | 2228 | vma = __i915_gem_vma_create(obj, vm, view); |
2221 | 2229 | ||
2222 | return vma; | 2230 | return vma; |
2223 | } | 2231 | } |
2232 | |||
2233 | static inline | ||
2234 | int i915_get_vma_pages(struct i915_vma *vma) | ||
2235 | { | ||
2236 | if (vma->ggtt_view.pages) | ||
2237 | return 0; | ||
2238 | |||
2239 | if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) | ||
2240 | vma->ggtt_view.pages = vma->obj->pages; | ||
2241 | else | ||
2242 | WARN_ONCE(1, "GGTT view %u not implemented!\n", | ||
2243 | vma->ggtt_view.type); | ||
2244 | |||
2245 | if (!vma->ggtt_view.pages) { | ||
2246 | DRM_ERROR("Failed to get pages for VMA view type %u!\n", | ||
2247 | vma->ggtt_view.type); | ||
2248 | return -EINVAL; | ||
2249 | } | ||
2250 | |||
2251 | return 0; | ||
2252 | } | ||
2253 | |||
2254 | /** | ||
2255 | * i915_vma_bind - Sets up PTEs for a VMA in its corresponding address space. | ||
2256 | * @vma: VMA to map | ||
2257 | * @cache_level: mapping cache level | ||
2258 | * @flags: flags like global or local mapping | ||
2259 | * | ||
2260 | * DMA addresses are taken from the scatter-gather table of this object (or of | ||
2261 | * this VMA in case of non-default GGTT views) and PTE entries set up. | ||
2262 | * Note that DMA addresses are also the only part of the SG table we care about. | ||
2263 | */ | ||
2264 | int i915_vma_bind(struct i915_vma *vma, enum i915_cache_level cache_level, | ||
2265 | u32 flags) | ||
2266 | { | ||
2267 | int ret = i915_get_vma_pages(vma); | ||
2268 | |||
2269 | if (ret) | ||
2270 | return ret; | ||
2271 | |||
2272 | vma->bind_vma(vma, cache_level, flags); | ||
2273 | |||
2274 | return 0; | ||
2275 | } | ||