-rw-r--r--  drivers/gpu/drm/i915/i915_drv.h     |  5
-rw-r--r--  drivers/gpu/drm/i915/i915_gem.c     |  6
-rw-r--r--  drivers/gpu/drm/i915/i915_gem_gtt.c | 45
3 files changed, 24 insertions, 32 deletions
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index e7a00b7cd372..3619f76367b2 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -1291,10 +1291,11 @@ void i915_ppgtt_unbind_object(struct i915_hw_ppgtt *ppgtt,
 			      struct drm_i915_gem_object *obj);
 
 void i915_gem_restore_gtt_mappings(struct drm_device *dev);
-int __must_check i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj);
-void i915_gem_gtt_rebind_object(struct drm_i915_gem_object *obj,
-				enum i915_cache_level cache_level);
+int __must_check i915_gem_gtt_prepare_object(struct drm_i915_gem_object *obj);
+void i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj,
+			      enum i915_cache_level cache_level);
 void i915_gem_gtt_unbind_object(struct drm_i915_gem_object *obj);
+void i915_gem_gtt_finish_object(struct drm_i915_gem_object *obj);
 
 /* i915_gem_evict.c */
 int __must_check i915_gem_evict_something(struct drm_device *dev, int min_size,
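Taken together, the header change turns the old two-call global-GTT interface (bind_object, which both set up the dma mapping and wrote the PTEs, plus rebind_object for cache-level changes) into four narrower steps: i915_gem_gtt_prepare_object() establishes the dma mapping and is the only step that can fail, i915_gem_gtt_bind_object() writes the GTT PTEs for a given cache level, i915_gem_gtt_unbind_object() clears them again, and i915_gem_gtt_finish_object() tears the dma mapping down. A minimal sketch of how callers are expected to pair the new entry points, inferred from the i915_gem.c hunks below; the wrapper names are illustrative and not part of the patch:

	/* illustrative wrappers only -- the real callers are
	 * i915_gem_object_bind_to_gtt() and i915_gem_object_unbind() */
	static int example_bind(struct drm_i915_gem_object *obj)
	{
		int ret;

		ret = i915_gem_gtt_prepare_object(obj);	/* dma-map backing pages */
		if (ret)
			return ret;

		i915_gem_gtt_bind_object(obj, obj->cache_level); /* write GTT PTEs */
		return 0;
	}

	static void example_unbind(struct drm_i915_gem_object *obj)
	{
		i915_gem_gtt_unbind_object(obj);	/* clear GTT PTEs */
		i915_gem_gtt_finish_object(obj);	/* dma-unmap backing pages */
	}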
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index 1f441f5c2405..031ca5bc1be8 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -2102,6 +2102,7 @@ i915_gem_object_unbind(struct drm_i915_gem_object *obj)
 		i915_ppgtt_unbind_object(dev_priv->mm.aliasing_ppgtt, obj);
 		obj->has_aliasing_ppgtt_mapping = 0;
 	}
+	i915_gem_gtt_finish_object(obj);
 
 	i915_gem_object_put_pages_gtt(obj);
 
@@ -2746,7 +2747,7 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 		return ret;
 	}
 
-	ret = i915_gem_gtt_bind_object(obj);
+	ret = i915_gem_gtt_prepare_object(obj);
 	if (ret) {
 		i915_gem_object_put_pages_gtt(obj);
 		drm_mm_put_block(obj->gtt_space);
@@ -2757,6 +2758,7 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 
 		goto search_free;
 	}
+	i915_gem_gtt_bind_object(obj, obj->cache_level);
 
 	list_add_tail(&obj->gtt_list, &dev_priv->mm.gtt_list);
 	list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list);
@@ -2950,7 +2952,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 			return ret;
 	}
 
-	i915_gem_gtt_rebind_object(obj, cache_level);
+	i915_gem_gtt_bind_object(obj, cache_level);
 	if (obj->has_aliasing_ppgtt_mapping)
 		i915_ppgtt_bind_object(dev_priv->mm.aliasing_ppgtt,
 				       obj, cache_level);
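In i915_gem.c the three users pick up the split: i915_gem_object_unbind() drops the dma mapping via i915_gem_gtt_finish_object() before releasing the backing pages, i915_gem_object_bind_to_gtt() runs the fallible prepare step first and only writes PTEs once it has succeeded, and i915_gem_object_set_cache_level() rewrites the PTEs through the renamed i915_gem_gtt_bind_object(). A condensed sketch of the new bind path, pieced together from the two i915_gem_object_bind_to_gtt() hunks above (the code between them is untouched by this patch and elided here):

	ret = i915_gem_gtt_prepare_object(obj);		/* dma mapping; may fail */
	if (ret) {
		i915_gem_object_put_pages_gtt(obj);
		drm_mm_put_block(obj->gtt_space);
		/* ... unchanged error handling elided ... */
		goto search_free;
	}
	i915_gem_gtt_bind_object(obj, obj->cache_level);	/* PTE writes; returns void */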
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c
index 2eacd78bb93b..bf33eaf045b2 100644
--- a/drivers/gpu/drm/i915/i915_gem_gtt.c
+++ b/drivers/gpu/drm/i915/i915_gem_gtt.c
@@ -355,42 +355,28 @@ void i915_gem_restore_gtt_mappings(struct drm_device *dev)
 
 	list_for_each_entry(obj, &dev_priv->mm.gtt_list, gtt_list) {
 		i915_gem_clflush_object(obj);
-		i915_gem_gtt_rebind_object(obj, obj->cache_level);
+		i915_gem_gtt_bind_object(obj, obj->cache_level);
 	}
 
 	intel_gtt_chipset_flush();
 }
 
-int i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj)
+int i915_gem_gtt_prepare_object(struct drm_i915_gem_object *obj)
 {
 	struct drm_device *dev = obj->base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
-	unsigned int agp_type = cache_level_to_agp_type(dev, obj->cache_level);
-	int ret;
 
-	if (dev_priv->mm.gtt->needs_dmar) {
-		ret = intel_gtt_map_memory(obj->pages,
-					   obj->base.size >> PAGE_SHIFT,
-					   &obj->sg_list,
-					   &obj->num_sg);
-		if (ret != 0)
-			return ret;
-
-		intel_gtt_insert_sg_entries(obj->sg_list,
-					    obj->num_sg,
-					    obj->gtt_space->start >> PAGE_SHIFT,
-					    agp_type);
-	} else
-		intel_gtt_insert_pages(obj->gtt_space->start >> PAGE_SHIFT,
-				       obj->base.size >> PAGE_SHIFT,
-				       obj->pages,
-				       agp_type);
-
-	return 0;
+	if (dev_priv->mm.gtt->needs_dmar)
+		return intel_gtt_map_memory(obj->pages,
+					    obj->base.size >> PAGE_SHIFT,
+					    &obj->sg_list,
+					    &obj->num_sg);
+	else
+		return 0;
 }
 
-void i915_gem_gtt_rebind_object(struct drm_i915_gem_object *obj,
-				enum i915_cache_level cache_level)
+void i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj,
+			      enum i915_cache_level cache_level)
 {
 	struct drm_device *dev = obj->base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
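With the dma-mapping half split out, i915_gem_gtt_prepare_object() keeps only the intel_gtt_map_memory() call for the needs_dmar case, and the PTE writes the old bind_object performed (intel_gtt_insert_sg_entries() or intel_gtt_insert_pages(), using the agp_type derived from the cache level) fall entirely to the renamed i915_gem_gtt_bind_object(), the former rebind_object. Its body is not touched by this hunk; the sketch below reconstructs its presumable shape from the lines removed above and is not verbatim:

	unsigned int agp_type = cache_level_to_agp_type(dev, cache_level);

	if (dev_priv->mm.gtt->needs_dmar)
		intel_gtt_insert_sg_entries(obj->sg_list, obj->num_sg,
					    obj->gtt_space->start >> PAGE_SHIFT,
					    agp_type);
	else
		intel_gtt_insert_pages(obj->gtt_space->start >> PAGE_SHIFT,
				       obj->base.size >> PAGE_SHIFT,
				       obj->pages, agp_type);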
@@ -412,15 +398,18 @@ void i915_gem_gtt_rebind_object(struct drm_i915_gem_object *obj,
 
 void i915_gem_gtt_unbind_object(struct drm_i915_gem_object *obj)
 {
+	intel_gtt_clear_range(obj->gtt_space->start >> PAGE_SHIFT,
+			      obj->base.size >> PAGE_SHIFT);
+}
+
+void i915_gem_gtt_finish_object(struct drm_i915_gem_object *obj)
+{
 	struct drm_device *dev = obj->base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	bool interruptible;
 
 	interruptible = do_idling(dev_priv);
 
-	intel_gtt_clear_range(obj->gtt_space->start >> PAGE_SHIFT,
-			      obj->base.size >> PAGE_SHIFT);
-
 	if (obj->sg_list) {
 		intel_gtt_unmap_memory(obj->sg_list, obj->num_sg);
 		obj->sg_list = NULL;
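The last hunk performs the matching split on teardown: i915_gem_gtt_unbind_object() is reduced to scratching out the PTEs with intel_gtt_clear_range(), while the new i915_gem_gtt_finish_object() inherits the do_idling() bracket and the intel_gtt_unmap_memory() call that undoes the mapping set up by i915_gem_gtt_prepare_object(). The interruptible flag is presumably handed back to an undo_idling() counterpart further down in the function, outside the lines shown; a sketch of that assumed bracket:

	/* assumed completion of i915_gem_gtt_finish_object() -- undo_idling()
	 * is not visible in this hunk and is an assumption here */
	interruptible = do_idling(dev_priv);

	if (obj->sg_list) {
		intel_gtt_unmap_memory(obj->sg_list, obj->num_sg);
		obj->sg_list = NULL;
	}

	undo_idling(dev_priv, interruptible);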