author		Chris Wilson <chris@chris-wilson.co.uk>	2010-11-23 10:26:33 -0500
committer	Chris Wilson <chris@chris-wilson.co.uk>	2010-11-25 10:04:04 -0500
commit		2021746e1d5ad1e3b51e24480c566acbb833c7c1 (patch)
tree		6cefa547a20adfb6ea0c9ad267d6e9c11fdb2405
parent		ab5793ad3ae11a5cbe2194b449e5fdd80b19f14f (diff)
drm/i915: Mark a few functions as __must_check
... to benefit from the compiler checking that we remember to handle and propagate errors.

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
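For context, __must_check is the kernel's wrapper around the compiler's warn_unused_result attribute, so a caller that silently drops the return value of a marked function now draws a warning at build time. A minimal standalone sketch of the effect (pin_object() below is a made-up stand-in, not part of the i915 code):

/* sketch.c -- what __must_check buys us; pin_object() is hypothetical. */
#define __must_check __attribute__((warn_unused_result))

static int __must_check pin_object(int *obj)
{
	if (!obj)
		return -22;		/* -EINVAL */
	(*obj)++;
	return 0;
}

int main(void)
{
	int obj = 0;
	int ret;

	pin_object(&obj);	/* gcc/clang: warning: ignoring return value of
				 * 'pin_object', declared with attribute
				 * warn_unused_result [-Wunused-result]
				 */

	ret = pin_object(&obj);	/* fine: the error is captured and can be
				 * handled or propagated by the caller
				 */
	return ret < 0 ? 1 : 0;
}

Functions whose callers may legitimately ignore the result (the void unpin/cleanup helpers in the header below) are left unannotated.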
-rw-r--r--	drivers/gpu/drm/i915/i915_drv.h	77
-rw-r--r--	drivers/gpu/drm/i915/i915_gem.c	37
2 files changed, 58 insertions(+), 56 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index ee7df1d2b8c8..b6ca10ade426 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -1093,11 +1093,11 @@ int i915_gem_init_object(struct drm_gem_object *obj);
 struct drm_i915_gem_object *i915_gem_alloc_object(struct drm_device *dev,
						  size_t size);
 void i915_gem_free_object(struct drm_gem_object *obj);
-int i915_gem_object_pin(struct drm_i915_gem_object *obj,
-			uint32_t alignment,
-			bool map_and_fenceable);
+int __must_check i915_gem_object_pin(struct drm_i915_gem_object *obj,
+				     uint32_t alignment,
+				     bool map_and_fenceable);
 void i915_gem_object_unpin(struct drm_i915_gem_object *obj);
-int i915_gem_object_unbind(struct drm_i915_gem_object *obj);
+int __must_check i915_gem_object_unbind(struct drm_i915_gem_object *obj);
 void i915_gem_release_mmap(struct drm_i915_gem_object *obj);
 void i915_gem_lastclose(struct drm_device *dev);
@@ -1110,37 +1110,42 @@ i915_seqno_passed(uint32_t seq1, uint32_t seq2)
 	return (int32_t)(seq1 - seq2) >= 0;
 }
 
-int i915_gem_object_get_fence_reg(struct drm_i915_gem_object *obj,
-				  bool interruptible);
-int i915_gem_object_put_fence_reg(struct drm_i915_gem_object *obj,
-				  bool interruptible);
+int __must_check i915_gem_object_get_fence_reg(struct drm_i915_gem_object *obj,
+					       bool interruptible);
+int __must_check i915_gem_object_put_fence_reg(struct drm_i915_gem_object *obj,
+					       bool interruptible);
+
 void i915_gem_retire_requests(struct drm_device *dev);
 void i915_gem_reset(struct drm_device *dev);
 void i915_gem_clflush_object(struct drm_i915_gem_object *obj);
-int i915_gem_object_set_domain(struct drm_i915_gem_object *obj,
-			       uint32_t read_domains,
-			       uint32_t write_domain);
-int i915_gem_object_flush_gpu(struct drm_i915_gem_object *obj,
-			      bool interruptible);
-int i915_gem_init_ringbuffer(struct drm_device *dev);
+int __must_check i915_gem_object_set_domain(struct drm_i915_gem_object *obj,
+					    uint32_t read_domains,
+					    uint32_t write_domain);
+int __must_check i915_gem_object_flush_gpu(struct drm_i915_gem_object *obj,
+					   bool interruptible);
+int __must_check i915_gem_init_ringbuffer(struct drm_device *dev);
 void i915_gem_cleanup_ringbuffer(struct drm_device *dev);
-int i915_gem_do_init(struct drm_device *dev, unsigned long start,
-		     unsigned long mappable_end, unsigned long end);
-int i915_gpu_idle(struct drm_device *dev);
-int i915_gem_idle(struct drm_device *dev);
-int i915_add_request(struct drm_device *dev,
-		     struct drm_file *file_priv,
-		     struct drm_i915_gem_request *request,
-		     struct intel_ring_buffer *ring);
-int i915_do_wait_request(struct drm_device *dev,
-			 uint32_t seqno,
-			 bool interruptible,
-			 struct intel_ring_buffer *ring);
+void i915_gem_do_init(struct drm_device *dev,
+		      unsigned long start,
+		      unsigned long mappable_end,
+		      unsigned long end);
+int __must_check i915_gpu_idle(struct drm_device *dev);
+int __must_check i915_gem_idle(struct drm_device *dev);
+int __must_check i915_add_request(struct drm_device *dev,
+				  struct drm_file *file_priv,
+				  struct drm_i915_gem_request *request,
+				  struct intel_ring_buffer *ring);
+int __must_check i915_do_wait_request(struct drm_device *dev,
+				      uint32_t seqno,
+				      bool interruptible,
+				      struct intel_ring_buffer *ring);
 int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf);
-int i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj,
-				      int write);
-int i915_gem_object_set_to_display_plane(struct drm_i915_gem_object *obj,
-					 struct intel_ring_buffer *pipelined);
+int __must_check
+i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj,
+				  bool write);
+int __must_check
+i915_gem_object_set_to_display_plane(struct drm_i915_gem_object *obj,
+				     struct intel_ring_buffer *pipelined);
 int i915_gem_attach_phys_object(struct drm_device *dev,
				struct drm_i915_gem_object *obj,
				int id,
@@ -1152,14 +1157,16 @@ void i915_gem_release(struct drm_device *dev, struct drm_file *file);
 
 /* i915_gem_gtt.c */
 void i915_gem_restore_gtt_mappings(struct drm_device *dev);
-int i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj);
+int __must_check i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj);
 void i915_gem_gtt_unbind_object(struct drm_i915_gem_object *obj);
 
 /* i915_gem_evict.c */
-int i915_gem_evict_something(struct drm_device *dev, int min_size,
-			     unsigned alignment, bool mappable);
-int i915_gem_evict_everything(struct drm_device *dev, bool purgeable_only);
-int i915_gem_evict_inactive(struct drm_device *dev, bool purgeable_only);
+int __must_check i915_gem_evict_something(struct drm_device *dev, int min_size,
+					  unsigned alignment, bool mappable);
+int __must_check i915_gem_evict_everything(struct drm_device *dev,
+					   bool purgeable_only);
+int __must_check i915_gem_evict_inactive(struct drm_device *dev,
+					 bool purgeable_only);
 
 /* i915_gem_tiling.c */
 void i915_gem_detect_bit_6_swizzle(struct drm_device *dev);
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index 939c9e34ce96..f6167c55a649 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -215,27 +215,19 @@ i915_gem_object_is_inactive(struct drm_i915_gem_object *obj)
 	return obj->gtt_space && !obj->active && obj->pin_count == 0;
 }
 
-int i915_gem_do_init(struct drm_device *dev,
-		     unsigned long start,
-		     unsigned long mappable_end,
-		     unsigned long end)
+void i915_gem_do_init(struct drm_device *dev,
+		      unsigned long start,
+		      unsigned long mappable_end,
+		      unsigned long end)
 {
 	drm_i915_private_t *dev_priv = dev->dev_private;
 
-	if (start >= end ||
-	    (start & (PAGE_SIZE - 1)) != 0 ||
-	    (end & (PAGE_SIZE - 1)) != 0) {
-		return -EINVAL;
-	}
-
 	drm_mm_init(&dev_priv->mm.gtt_space, start,
		    end - start);
 
 	dev_priv->mm.gtt_total = end - start;
 	dev_priv->mm.mappable_gtt_total = min(end, mappable_end) - start;
 	dev_priv->mm.gtt_mappable_end = mappable_end;
-
-	return 0;
 }
 
 int
@@ -243,13 +235,16 @@ i915_gem_init_ioctl(struct drm_device *dev, void *data,
		    struct drm_file *file)
 {
 	struct drm_i915_gem_init *args = data;
-	int ret;
+
+	if (args->gtt_start >= args->gtt_end ||
+	    (args->gtt_end | args->gtt_start) & (PAGE_SIZE - 1))
+		return -EINVAL;
 
 	mutex_lock(&dev->struct_mutex);
-	ret = i915_gem_do_init(dev, args->gtt_start, args->gtt_end, args->gtt_end);
+	i915_gem_do_init(dev, args->gtt_start, args->gtt_end, args->gtt_end);
 	mutex_unlock(&dev->struct_mutex);
 
-	return ret;
+	return 0;
 }
 
 int
@@ -2949,7 +2944,7 @@ i915_gem_object_flush_cpu_write_domain(struct drm_i915_gem_object *obj)
  * flushes to occur.
  */
 int
-i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, int write)
+i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write)
 {
 	uint32_t old_write_domain, old_read_domains;
 	int ret;
@@ -5177,8 +5172,8 @@ rescan:
				 &dev_priv->mm.inactive_list,
				 mm_list) {
 		if (i915_gem_object_is_purgeable(obj)) {
-			i915_gem_object_unbind(obj);
-			if (--nr_to_scan == 0)
+			if (i915_gem_object_unbind(obj) == 0 &&
+			    --nr_to_scan == 0)
				break;
 		}
 	}
@@ -5188,10 +5183,10 @@ rescan:
 	list_for_each_entry_safe(obj, next,
				 &dev_priv->mm.inactive_list,
				 mm_list) {
-		if (nr_to_scan) {
-			i915_gem_object_unbind(obj);
+		if (nr_to_scan &&
+		    i915_gem_object_unbind(obj) == 0)
			nr_to_scan--;
-		} else
+		else
			cnt++;
 	}
 