about | summary | refs | log | tree | commit | diff | stats
path: root/drivers/gpu/drm/i915/i915_gem.c
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/gpu/drm/i915/i915_gem.c')
-rw-r--r--  drivers/gpu/drm/i915/i915_gem.c  66
1 file changed, 39 insertions, 27 deletions
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index 3bef7e60ddd6..6a80d6565ef2 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -1441,7 +1441,7 @@ i915_gem_object_move_to_active(struct drm_i915_gem_object *obj,
1441 list_move_tail(&obj->mm_list, &dev_priv->mm.active_list); 1441 list_move_tail(&obj->mm_list, &dev_priv->mm.active_list);
1442 list_move_tail(&obj->ring_list, &ring->active_list); 1442 list_move_tail(&obj->ring_list, &ring->active_list);
1443 1443
1444 obj->last_rendering_seqno = seqno; 1444 obj->last_read_seqno = seqno;
1445 1445
1446 if (obj->fenced_gpu_access) { 1446 if (obj->fenced_gpu_access) {
1447 obj->last_fenced_seqno = seqno; 1447 obj->last_fenced_seqno = seqno;
@@ -1461,7 +1461,8 @@ static void
1461i915_gem_object_move_off_active(struct drm_i915_gem_object *obj) 1461i915_gem_object_move_off_active(struct drm_i915_gem_object *obj)
1462{ 1462{
1463 list_del_init(&obj->ring_list); 1463 list_del_init(&obj->ring_list);
1464 obj->last_rendering_seqno = 0; 1464 obj->last_read_seqno = 0;
1465 obj->last_write_seqno = 0;
1465 obj->last_fenced_seqno = 0; 1466 obj->last_fenced_seqno = 0;
1466} 1467}
1467 1468
@@ -1493,7 +1494,6 @@ i915_gem_object_move_to_inactive(struct drm_i915_gem_object *obj)
1493 obj->fenced_gpu_access = false; 1494 obj->fenced_gpu_access = false;
1494 1495
1495 obj->active = 0; 1496 obj->active = 0;
1496 obj->pending_gpu_write = false;
1497 drm_gem_object_unreference(&obj->base); 1497 drm_gem_object_unreference(&obj->base);
1498 1498
1499 WARN_ON(i915_verify_lists(dev)); 1499 WARN_ON(i915_verify_lists(dev));
@@ -1812,7 +1812,7 @@ i915_gem_retire_requests_ring(struct intel_ring_buffer *ring)
1812 struct drm_i915_gem_object, 1812 struct drm_i915_gem_object,
1813 ring_list); 1813 ring_list);
1814 1814
1815 if (!i915_seqno_passed(seqno, obj->last_rendering_seqno)) 1815 if (!i915_seqno_passed(seqno, obj->last_read_seqno))
1816 break; 1816 break;
1817 1817
1818 if (obj->base.write_domain != 0) 1818 if (obj->base.write_domain != 0)
@@ -2036,9 +2036,11 @@ i915_wait_seqno(struct intel_ring_buffer *ring, uint32_t seqno)
2036 * Ensures that all rendering to the object has completed and the object is 2036 * Ensures that all rendering to the object has completed and the object is
2037 * safe to unbind from the GTT or access from the CPU. 2037 * safe to unbind from the GTT or access from the CPU.
2038 */ 2038 */
2039int 2039static __must_check int
2040i915_gem_object_wait_rendering(struct drm_i915_gem_object *obj) 2040i915_gem_object_wait_rendering(struct drm_i915_gem_object *obj,
2041 bool readonly)
2041{ 2042{
2043 u32 seqno;
2042 int ret; 2044 int ret;
2043 2045
2044 /* This function only exists to support waiting for existing rendering, 2046 /* This function only exists to support waiting for existing rendering,
@@ -2049,13 +2051,27 @@ i915_gem_object_wait_rendering(struct drm_i915_gem_object *obj)
2049 /* If there is rendering queued on the buffer being evicted, wait for 2051 /* If there is rendering queued on the buffer being evicted, wait for
2050 * it. 2052 * it.
2051 */ 2053 */
2052 if (obj->active) { 2054 if (readonly)
2053 ret = i915_wait_seqno(obj->ring, obj->last_rendering_seqno); 2055 seqno = obj->last_write_seqno;
2054 if (ret) 2056 else
2055 return ret; 2057 seqno = obj->last_read_seqno;
2056 i915_gem_retire_requests_ring(obj->ring); 2058 if (seqno == 0)
2059 return 0;
2060
2061 ret = i915_wait_seqno(obj->ring, seqno);
2062 if (ret)
2063 return ret;
2064
2065 /* Manually manage the write flush as we may have not yet retired
2066 * the buffer.
2067 */
2068 if (obj->last_write_seqno &&
2069 i915_seqno_passed(seqno, obj->last_write_seqno)) {
2070 obj->last_write_seqno = 0;
2071 obj->base.write_domain &= ~I915_GEM_GPU_DOMAINS;
2057 } 2072 }
2058 2073
2074 i915_gem_retire_requests_ring(obj->ring);
2059 return 0; 2075 return 0;
2060} 2076}
2061 2077
@@ -2074,10 +2090,10 @@ i915_gem_object_flush_active(struct drm_i915_gem_object *obj)
2074 if (ret) 2090 if (ret)
2075 return ret; 2091 return ret;
2076 2092
2077 ret = i915_gem_check_olr(obj->ring, 2093 ret = i915_gem_check_olr(obj->ring, obj->last_read_seqno);
2078 obj->last_rendering_seqno);
2079 if (ret) 2094 if (ret)
2080 return ret; 2095 return ret;
2096
2081 i915_gem_retire_requests_ring(obj->ring); 2097 i915_gem_retire_requests_ring(obj->ring);
2082 } 2098 }
2083 2099
@@ -2137,7 +2153,7 @@ i915_gem_wait_ioctl(struct drm_device *dev, void *data, struct drm_file *file)
2137 goto out; 2153 goto out;
2138 2154
2139 if (obj->active) { 2155 if (obj->active) {
2140 seqno = obj->last_rendering_seqno; 2156 seqno = obj->last_read_seqno;
2141 ring = obj->ring; 2157 ring = obj->ring;
2142 } 2158 }
2143 2159
@@ -2192,11 +2208,11 @@ i915_gem_object_sync(struct drm_i915_gem_object *obj,
2192 return 0; 2208 return 0;
2193 2209
2194 if (to == NULL || !i915_semaphore_is_enabled(obj->base.dev)) 2210 if (to == NULL || !i915_semaphore_is_enabled(obj->base.dev))
2195 return i915_gem_object_wait_rendering(obj); 2211 return i915_gem_object_wait_rendering(obj, false);
2196 2212
2197 idx = intel_ring_sync_index(from, to); 2213 idx = intel_ring_sync_index(from, to);
2198 2214
2199 seqno = obj->last_rendering_seqno; 2215 seqno = obj->last_read_seqno;
2200 if (seqno <= from->sync_seqno[idx]) 2216 if (seqno <= from->sync_seqno[idx])
2201 return 0; 2217 return 0;
2202 2218
@@ -2940,11 +2956,9 @@ i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write)
2940 if (ret) 2956 if (ret)
2941 return ret; 2957 return ret;
2942 2958
2943 if (obj->pending_gpu_write || write) { 2959 ret = i915_gem_object_wait_rendering(obj, !write);
2944 ret = i915_gem_object_wait_rendering(obj); 2960 if (ret)
2945 if (ret) 2961 return ret;
2946 return ret;
2947 }
2948 2962
2949 i915_gem_object_flush_cpu_write_domain(obj); 2963 i915_gem_object_flush_cpu_write_domain(obj);
2950 2964
@@ -3115,7 +3129,7 @@ i915_gem_object_finish_gpu(struct drm_i915_gem_object *obj)
3115 return ret; 3129 return ret;
3116 } 3130 }
3117 3131
3118 ret = i915_gem_object_wait_rendering(obj); 3132 ret = i915_gem_object_wait_rendering(obj, false);
3119 if (ret) 3133 if (ret)
3120 return ret; 3134 return ret;
3121 3135
@@ -3143,11 +3157,9 @@ i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write)
3143 if (ret) 3157 if (ret)
3144 return ret; 3158 return ret;
3145 3159
3146 if (write || obj->pending_gpu_write) { 3160 ret = i915_gem_object_wait_rendering(obj, !write);
3147 ret = i915_gem_object_wait_rendering(obj); 3161 if (ret)
3148 if (ret) 3162 return ret;
3149 return ret;
3150 }
3151 3163
3152 i915_gem_object_flush_gtt_write_domain(obj); 3164 i915_gem_object_flush_gtt_write_domain(obj);
3153 3165