diff options
-rw-r--r--  drivers/gpu/drm/i915/i915_drv.h | 11
-rw-r--r--  drivers/gpu/drm/i915/i915_gem.c | 23
2 files changed, 30 insertions(+), 4 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index aaf934d96f21..b99b6a841d95 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -493,6 +493,15 @@ typedef struct drm_i915_private {
493 | struct list_head flushing_list; | 493 | struct list_head flushing_list; |
494 | 494 | ||
495 | /** | 495 | /** |
496 | * List of objects currently pending a GPU write flush. | ||
497 | * | ||
498 | * All elements on this list will belong to either the | ||
499 | * active_list or flushing_list, last_rendering_seqno can | ||
500 | * be used to differentiate between the two elements. | ||
501 | */ | ||
502 | struct list_head gpu_write_list; | ||
503 | |||
504 | /** | ||
496 | * LRU list of objects which are not in the ringbuffer and | 505 | * LRU list of objects which are not in the ringbuffer and |
497 | * are ready to unbind, but are still in the GTT. | 506 | * are ready to unbind, but are still in the GTT. |
498 | * | 507 | * |
@@ -592,6 +601,8 @@ struct drm_i915_gem_object {
592 | 601 | ||
593 | /** This object's place on the active/flushing/inactive lists */ | 602 | /** This object's place on the active/flushing/inactive lists */ |
594 | struct list_head list; | 603 | struct list_head list; |
604 | /** This object's place on GPU write list */ | ||
605 | struct list_head gpu_write_list; | ||
595 | 606 | ||
596 | /** This object's place on the fenced object LRU */ | 607 | /** This object's place on the fenced object LRU */ |
597 | struct list_head fence_list; | 608 | struct list_head fence_list; |
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index b4c8c0230689..11daa618385f 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -1552,6 +1552,8 @@ i915_gem_object_move_to_inactive(struct drm_gem_object *obj)
1552 | else | 1552 | else |
1553 | list_move_tail(&obj_priv->list, &dev_priv->mm.inactive_list); | 1553 | list_move_tail(&obj_priv->list, &dev_priv->mm.inactive_list); |
1554 | 1554 | ||
1555 | BUG_ON(!list_empty(&obj_priv->gpu_write_list)); | ||
1556 | |||
1555 | obj_priv->last_rendering_seqno = 0; | 1557 | obj_priv->last_rendering_seqno = 0; |
1556 | if (obj_priv->active) { | 1558 | if (obj_priv->active) { |
1557 | obj_priv->active = 0; | 1559 | obj_priv->active = 0; |
@@ -1622,7 +1624,8 @@ i915_add_request(struct drm_device *dev, struct drm_file *file_priv,
1622 | struct drm_i915_gem_object *obj_priv, *next; | 1624 | struct drm_i915_gem_object *obj_priv, *next; |
1623 | 1625 | ||
1624 | list_for_each_entry_safe(obj_priv, next, | 1626 | list_for_each_entry_safe(obj_priv, next, |
1625 | &dev_priv->mm.flushing_list, list) { | 1627 | &dev_priv->mm.gpu_write_list, |
1628 | gpu_write_list) { | ||
1626 | struct drm_gem_object *obj = obj_priv->obj; | 1629 | struct drm_gem_object *obj = obj_priv->obj; |
1627 | 1630 | ||
1628 | if ((obj->write_domain & flush_domains) == | 1631 | if ((obj->write_domain & flush_domains) == |
@@ -1630,6 +1633,7 @@ i915_add_request(struct drm_device *dev, struct drm_file *file_priv,
1630 | uint32_t old_write_domain = obj->write_domain; | 1633 | uint32_t old_write_domain = obj->write_domain; |
1631 | 1634 | ||
1632 | obj->write_domain = 0; | 1635 | obj->write_domain = 0; |
1636 | list_del_init(&obj_priv->gpu_write_list); | ||
1633 | i915_gem_object_move_to_active(obj, seqno); | 1637 | i915_gem_object_move_to_active(obj, seqno); |
1634 | 1638 | ||
1635 | trace_i915_gem_object_change_domain(obj, | 1639 | trace_i915_gem_object_change_domain(obj, |
@@ -2084,8 +2088,8 @@ static int
2084 | i915_gem_evict_everything(struct drm_device *dev) | 2088 | i915_gem_evict_everything(struct drm_device *dev) |
2085 | { | 2089 | { |
2086 | drm_i915_private_t *dev_priv = dev->dev_private; | 2090 | drm_i915_private_t *dev_priv = dev->dev_private; |
2087 | uint32_t seqno; | ||
2088 | int ret; | 2091 | int ret; |
2092 | uint32_t seqno; | ||
2089 | bool lists_empty; | 2093 | bool lists_empty; |
2090 | 2094 | ||
2091 | spin_lock(&dev_priv->mm.active_list_lock); | 2095 | spin_lock(&dev_priv->mm.active_list_lock); |
@@ -2107,6 +2111,8 @@ i915_gem_evict_everything(struct drm_device *dev)
2107 | if (ret) | 2111 | if (ret) |
2108 | return ret; | 2112 | return ret; |
2109 | 2113 | ||
2114 | BUG_ON(!list_empty(&dev_priv->mm.flushing_list)); | ||
2115 | |||
2110 | ret = i915_gem_evict_from_inactive_list(dev); | 2116 | ret = i915_gem_evict_from_inactive_list(dev); |
2111 | if (ret) | 2117 | if (ret) |
2112 | return ret; | 2118 | return ret; |
@@ -2701,7 +2707,7 @@ i915_gem_object_flush_gpu_write_domain(struct drm_gem_object *obj)
2701 | old_write_domain = obj->write_domain; | 2707 | old_write_domain = obj->write_domain; |
2702 | i915_gem_flush(dev, 0, obj->write_domain); | 2708 | i915_gem_flush(dev, 0, obj->write_domain); |
2703 | seqno = i915_add_request(dev, NULL, obj->write_domain); | 2709 | seqno = i915_add_request(dev, NULL, obj->write_domain); |
2704 | obj->write_domain = 0; | 2710 | BUG_ON(obj->write_domain); |
2705 | i915_gem_object_move_to_active(obj, seqno); | 2711 | i915_gem_object_move_to_active(obj, seqno); |
2706 | 2712 | ||
2707 | trace_i915_gem_object_change_domain(obj, | 2713 | trace_i915_gem_object_change_domain(obj, |
@@ -3850,16 +3856,23 @@ i915_gem_do_execbuffer(struct drm_device *dev, void *data,
3850 | i915_gem_flush(dev, | 3856 | i915_gem_flush(dev, |
3851 | dev->invalidate_domains, | 3857 | dev->invalidate_domains, |
3852 | dev->flush_domains); | 3858 | dev->flush_domains); |
3853 | if (dev->flush_domains) | 3859 | if (dev->flush_domains & I915_GEM_GPU_DOMAINS) |
3854 | (void)i915_add_request(dev, file_priv, | 3860 | (void)i915_add_request(dev, file_priv, |
3855 | dev->flush_domains); | 3861 | dev->flush_domains); |
3856 | } | 3862 | } |
3857 | 3863 | ||
3858 | for (i = 0; i < args->buffer_count; i++) { | 3864 | for (i = 0; i < args->buffer_count; i++) { |
3859 | struct drm_gem_object *obj = object_list[i]; | 3865 | struct drm_gem_object *obj = object_list[i]; |
3866 | struct drm_i915_gem_object *obj_priv = obj->driver_private; | ||
3860 | uint32_t old_write_domain = obj->write_domain; | 3867 | uint32_t old_write_domain = obj->write_domain; |
3861 | 3868 | ||
3862 | obj->write_domain = obj->pending_write_domain; | 3869 | obj->write_domain = obj->pending_write_domain; |
3870 | if (obj->write_domain) | ||
3871 | list_move_tail(&obj_priv->gpu_write_list, | ||
3872 | &dev_priv->mm.gpu_write_list); | ||
3873 | else | ||
3874 | list_del_init(&obj_priv->gpu_write_list); | ||
3875 | |||
3863 | trace_i915_gem_object_change_domain(obj, | 3876 | trace_i915_gem_object_change_domain(obj, |
3864 | obj->read_domains, | 3877 | obj->read_domains, |
3865 | old_write_domain); | 3878 | old_write_domain); |
@@ -4370,6 +4383,7 @@ int i915_gem_init_object(struct drm_gem_object *obj)
4370 | obj_priv->obj = obj; | 4383 | obj_priv->obj = obj; |
4371 | obj_priv->fence_reg = I915_FENCE_REG_NONE; | 4384 | obj_priv->fence_reg = I915_FENCE_REG_NONE; |
4372 | INIT_LIST_HEAD(&obj_priv->list); | 4385 | INIT_LIST_HEAD(&obj_priv->list); |
4386 | INIT_LIST_HEAD(&obj_priv->gpu_write_list); | ||
4373 | INIT_LIST_HEAD(&obj_priv->fence_list); | 4387 | INIT_LIST_HEAD(&obj_priv->fence_list); |
4374 | obj_priv->madv = I915_MADV_WILLNEED; | 4388 | obj_priv->madv = I915_MADV_WILLNEED; |
4375 | 4389 | ||
@@ -4821,6 +4835,7 @@ i915_gem_load(struct drm_device *dev)
4821 | spin_lock_init(&dev_priv->mm.active_list_lock); | 4835 | spin_lock_init(&dev_priv->mm.active_list_lock); |
4822 | INIT_LIST_HEAD(&dev_priv->mm.active_list); | 4836 | INIT_LIST_HEAD(&dev_priv->mm.active_list); |
4823 | INIT_LIST_HEAD(&dev_priv->mm.flushing_list); | 4837 | INIT_LIST_HEAD(&dev_priv->mm.flushing_list); |
4838 | INIT_LIST_HEAD(&dev_priv->mm.gpu_write_list); | ||
4824 | INIT_LIST_HEAD(&dev_priv->mm.inactive_list); | 4839 | INIT_LIST_HEAD(&dev_priv->mm.inactive_list); |
4825 | INIT_LIST_HEAD(&dev_priv->mm.request_list); | 4840 | INIT_LIST_HEAD(&dev_priv->mm.request_list); |
4826 | INIT_LIST_HEAD(&dev_priv->mm.fence_list); | 4841 | INIT_LIST_HEAD(&dev_priv->mm.fence_list); |