aboutsummaryrefslogtreecommitdiffstats
path: root/drivers
diff options
context:
space:
mode:
authorChris Wilson <chris@chris-wilson.co.uk>2012-11-21 08:04:03 -0500
committerDaniel Vetter <daniel.vetter@ffwll.ch>2012-11-21 11:47:13 -0500
commit8742267af4043606869f5b8dadbef635405543c3 (patch)
tree07378203a3fb2e9822256f5b672619f89ef6c7b4 /drivers
parent776ca7cf5bcc6892ad5bd781279744a654a8ed23 (diff)
drm/i915: Defer assignment of obj->gtt_space until after all possible mallocs
As we may invoke the shrinker whilst trying to allocate memory to hold the gtt_space for this object, we need to be careful not to mark the drm_mm_node as activated (by assigning it to this object) before we have finished our sequence of allocations.

Note: We also need to move the binding of the object into the actual pagetables down a bit. The best way seems to be to move it out into the callsites.

Reported-by: Imre Deak <imre.deak@gmail.com>
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
[danvet: Added small note to commit message to summarize review discussion.]
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
Diffstat (limited to 'drivers')
-rw-r--r--drivers/gpu/drm/i915/i915_gem.c39
1 file changed, 19 insertions(+), 20 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index e9c80065805b..71b5129947ba 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -2918,11 +2918,10 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 
  search_free:
 	if (map_and_fenceable)
-		free_space =
-			drm_mm_search_free_in_range_color(&dev_priv->mm.gtt_space,
-							  size, alignment, obj->cache_level,
-							  0, dev_priv->mm.gtt_mappable_end,
-							  false);
+		free_space = drm_mm_search_free_in_range_color(&dev_priv->mm.gtt_space,
+							       size, alignment, obj->cache_level,
+							       0, dev_priv->mm.gtt_mappable_end,
+							       false);
 	else
 		free_space = drm_mm_search_free_color(&dev_priv->mm.gtt_space,
 						      size, alignment, obj->cache_level,
@@ -2930,18 +2929,18 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 
 	if (free_space != NULL) {
 		if (map_and_fenceable)
-			obj->gtt_space =
+			free_space =
 				drm_mm_get_block_range_generic(free_space,
 							       size, alignment, obj->cache_level,
 							       0, dev_priv->mm.gtt_mappable_end,
 							       false);
 		else
-			obj->gtt_space =
+			free_space =
 				drm_mm_get_block_generic(free_space,
 							 size, alignment, obj->cache_level,
 							 false);
 	}
-	if (obj->gtt_space == NULL) {
+	if (free_space == NULL) {
 		ret = i915_gem_evict_something(dev, size, alignment,
 					       obj->cache_level,
 					       map_and_fenceable,
@@ -2954,34 +2953,29 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 		goto search_free;
 	}
 	if (WARN_ON(!i915_gem_valid_gtt_space(dev,
-					      obj->gtt_space,
+					      free_space,
 					      obj->cache_level))) {
 		i915_gem_object_unpin_pages(obj);
-		drm_mm_put_block(obj->gtt_space);
-		obj->gtt_space = NULL;
+		drm_mm_put_block(free_space);
 		return -EINVAL;
 	}
 
-
 	ret = i915_gem_gtt_prepare_object(obj);
 	if (ret) {
 		i915_gem_object_unpin_pages(obj);
-		drm_mm_put_block(obj->gtt_space);
-		obj->gtt_space = NULL;
+		drm_mm_put_block(free_space);
 		return ret;
 	}
 
-	if (!dev_priv->mm.aliasing_ppgtt)
-		i915_gem_gtt_bind_object(obj, obj->cache_level);
-
 	list_move_tail(&obj->gtt_list, &dev_priv->mm.bound_list);
 	list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list);
 
-	obj->gtt_offset = obj->gtt_space->start;
+	obj->gtt_space = free_space;
+	obj->gtt_offset = free_space->start;
 
 	fenceable =
-		obj->gtt_space->size == fence_size &&
-		(obj->gtt_space->start & (fence_alignment - 1)) == 0;
+		free_space->size == fence_size &&
+		(free_space->start & (fence_alignment - 1)) == 0;
 
 	mappable =
 		obj->gtt_offset + obj->base.size <= dev_priv->mm.gtt_mappable_end;
@@ -3452,11 +3446,16 @@ i915_gem_object_pin(struct drm_i915_gem_object *obj,
 	}
 
 	if (obj->gtt_space == NULL) {
+		struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
+
 		ret = i915_gem_object_bind_to_gtt(obj, alignment,
 						  map_and_fenceable,
 						  nonblocking);
 		if (ret)
 			return ret;
+
+		if (!dev_priv->mm.aliasing_ppgtt)
+			i915_gem_gtt_bind_object(obj, obj->cache_level);
 	}
 
 	if (!obj->has_global_gtt_mapping && map_and_fenceable)