Diffstat (limited to 'drivers/gpu/drm/i915/selftests/i915_request.c')
 drivers/gpu/drm/i915/selftests/i915_request.c | 23 +++++++++++++++--------
 1 file changed, 15 insertions(+), 8 deletions(-)
diff --git a/drivers/gpu/drm/i915/selftests/i915_request.c b/drivers/gpu/drm/i915/selftests/i915_request.c
index 63cd9486cc13..c4aac6141e04 100644
--- a/drivers/gpu/drm/i915/selftests/i915_request.c
+++ b/drivers/gpu/drm/i915/selftests/i915_request.c
@@ -262,7 +262,7 @@ int i915_request_mock_selftests(void)
 		return -ENOMEM;
 
 	err = i915_subtests(tests, i915);
-	drm_dev_unref(&i915->drm);
+	drm_dev_put(&i915->drm);
 
 	return err;
 }
@@ -286,7 +286,9 @@ static int begin_live_test(struct live_test *t,
 	t->func = func;
 	t->name = name;
 
-	err = i915_gem_wait_for_idle(i915, I915_WAIT_LOCKED);
+	err = i915_gem_wait_for_idle(i915,
+				     I915_WAIT_LOCKED,
+				     MAX_SCHEDULE_TIMEOUT);
 	if (err) {
 		pr_err("%s(%s): failed to idle before, with err=%d!",
 		       func, name, err);
@@ -594,11 +596,8 @@ static struct i915_vma *recursive_batch(struct drm_i915_private *i915)
 	} else if (gen >= 6) {
 		*cmd++ = MI_BATCH_BUFFER_START | 1 << 8;
 		*cmd++ = lower_32_bits(vma->node.start);
-	} else if (gen >= 4) {
-		*cmd++ = MI_BATCH_BUFFER_START | MI_BATCH_GTT;
-		*cmd++ = lower_32_bits(vma->node.start);
 	} else {
-		*cmd++ = MI_BATCH_BUFFER_START | MI_BATCH_GTT | 1;
+		*cmd++ = MI_BATCH_BUFFER_START | MI_BATCH_GTT;
 		*cmd++ = lower_32_bits(vma->node.start);
 	}
 	*cmd++ = MI_BATCH_BUFFER_END; /* terminate early in case of error */
@@ -678,7 +677,9 @@ static int live_all_engines(void *arg)
 			i915_gem_object_set_active_reference(batch->obj);
 		}
 
-		i915_vma_move_to_active(batch, request[id], 0);
+		err = i915_vma_move_to_active(batch, request[id], 0);
+		GEM_BUG_ON(err);
+
 		i915_request_get(request[id]);
 		i915_request_add(request[id]);
 	}
@@ -788,7 +789,9 @@ static int live_sequential_engines(void *arg)
 		GEM_BUG_ON(err);
 		request[id]->batch = batch;
 
-		i915_vma_move_to_active(batch, request[id], 0);
+		err = i915_vma_move_to_active(batch, request[id], 0);
+		GEM_BUG_ON(err);
+
 		i915_gem_object_set_active_reference(batch->obj);
 		i915_vma_get(batch);
 
@@ -862,5 +865,9 @@ int i915_request_live_selftests(struct drm_i915_private *i915)
 		SUBTEST(live_sequential_engines),
 		SUBTEST(live_empty_request),
 	};
+
+	if (i915_terminally_wedged(&i915->gpu_error))
+		return 0;
+
 	return i915_subtests(tests, i915);
 }