author	Maarten Lankhorst <maarten.lankhorst@canonical.com>	2014-04-02 11:14:48 -0400
committer	Maarten Lankhorst <maarten.lankhorst@canonical.com>	2014-09-02 10:41:50 -0400
commit	f2c24b83ae90292d315aa7ac029c6ce7929e01aa (patch)
tree	4ef7d29d97cee6231becd7565056d630770d0845 /drivers/gpu/drm/radeon/radeon_ttm.c
parent	2f453ed4038526172292fb3250b638b3782c7f2b (diff)
drm/ttm: flip the switch, and convert to dma_fence
Signed-off-by: Maarten Lankhorst <maarten.lankhorst@canonical.com>
Diffstat (limited to 'drivers/gpu/drm/radeon/radeon_ttm.c')
-rw-r--r--	drivers/gpu/drm/radeon/radeon_ttm.c	34
1 file changed, 2 insertions(+), 32 deletions(-)
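
The change below replaces radeon's driver-private sync_obj handling with the common fence stored in each buffer's reservation object. As a minimal sketch (not part of the patch; the surrounding radeon_move_blit() context such as rdev, old_start, new_start and error handling is elided), the blit path after this conversion hands its fence to TTM like this:

	struct radeon_fence *fence;
	int r;

	/* The fence to synchronize against now lives in the buffer's shared
	 * reservation object rather than in the driver-private bo->sync_obj. */
	fence = (struct radeon_fence *)reservation_object_get_excl(bo->resv);

	r = radeon_copy(rdev, old_start, new_start,
			new_mem->num_pages * (PAGE_SIZE / RADEON_GPU_PAGE_SIZE),
			&fence);

	/* TTM now takes the generic fence embedded in radeon_fence
	 * (&fence->base) instead of an opaque void *sync_obj, which is why
	 * the radeon_sync_obj_* callbacks and the sync_obj_* hooks in
	 * radeon_bo_driver can be dropped. */
	r = ttm_bo_move_accel_cleanup(bo, &fence->base,
				      evict, no_wait_gpu, new_mem);
	radeon_fence_unref(&fence);
	return r;
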
diff --git a/drivers/gpu/drm/radeon/radeon_ttm.c b/drivers/gpu/drm/radeon/radeon_ttm.c
index 822eb3630045..62d1f4d730a2 100644
--- a/drivers/gpu/drm/radeon/radeon_ttm.c
+++ b/drivers/gpu/drm/radeon/radeon_ttm.c
@@ -270,12 +270,12 @@ static int radeon_move_blit(struct ttm_buffer_object *bo,
 	BUILD_BUG_ON((PAGE_SIZE % RADEON_GPU_PAGE_SIZE) != 0);
 
 	/* sync other rings */
-	fence = bo->sync_obj;
+	fence = (struct radeon_fence *)reservation_object_get_excl(bo->resv);
 	r = radeon_copy(rdev, old_start, new_start,
 			new_mem->num_pages * (PAGE_SIZE / RADEON_GPU_PAGE_SIZE), /* GPU pages */
 			&fence);
 	/* FIXME: handle copy error */
-	r = ttm_bo_move_accel_cleanup(bo, (void *)fence,
+	r = ttm_bo_move_accel_cleanup(bo, &fence->base,
 				      evict, no_wait_gpu, new_mem);
 	radeon_fence_unref(&fence);
 	return r;
@@ -488,31 +488,6 @@ static void radeon_ttm_io_mem_free(struct ttm_bo_device *bdev, struct ttm_mem_re
 {
 }
 
-static int radeon_sync_obj_wait(void *sync_obj, bool lazy, bool interruptible)
-{
-	return radeon_fence_wait((struct radeon_fence *)sync_obj, interruptible);
-}
-
-static int radeon_sync_obj_flush(void *sync_obj)
-{
-	return 0;
-}
-
-static void radeon_sync_obj_unref(void **sync_obj)
-{
-	radeon_fence_unref((struct radeon_fence **)sync_obj);
-}
-
-static void *radeon_sync_obj_ref(void *sync_obj)
-{
-	return radeon_fence_ref((struct radeon_fence *)sync_obj);
-}
-
-static bool radeon_sync_obj_signaled(void *sync_obj)
-{
-	return radeon_fence_signaled((struct radeon_fence *)sync_obj);
-}
-
 /*
  * TTM backend functions.
  */
@@ -847,11 +822,6 @@ static struct ttm_bo_driver radeon_bo_driver = {
 	.evict_flags = &radeon_evict_flags,
 	.move = &radeon_bo_move,
 	.verify_access = &radeon_verify_access,
-	.sync_obj_signaled = &radeon_sync_obj_signaled,
-	.sync_obj_wait = &radeon_sync_obj_wait,
-	.sync_obj_flush = &radeon_sync_obj_flush,
-	.sync_obj_unref = &radeon_sync_obj_unref,
-	.sync_obj_ref = &radeon_sync_obj_ref,
 	.move_notify = &radeon_bo_move_notify,
 	.fault_reserve_notify = &radeon_bo_fault_reserve_notify,
 	.io_mem_reserve = &radeon_ttm_io_mem_reserve,