Diffstat (limited to 'drivers/gpu/drm/virtio')
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_display.c   5
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_drv.h       9
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_fb.c       14
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_gem.c       7
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_ioctl.c     4
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_object.c   19
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_plane.c    58
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_prime.c    11
-rw-r--r--  drivers/gpu/drm/virtio/virtgpu_vq.c       20
9 files changed, 53 insertions(+), 94 deletions(-)
diff --git a/drivers/gpu/drm/virtio/virtgpu_display.c b/drivers/gpu/drm/virtio/virtgpu_display.c
index 0379d6897659..8f8fed471e34 100644
--- a/drivers/gpu/drm/virtio/virtgpu_display.c
+++ b/drivers/gpu/drm/virtio/virtgpu_display.c
@@ -307,6 +307,10 @@ virtio_gpu_user_framebuffer_create(struct drm_device *dev,
 	struct virtio_gpu_framebuffer *virtio_gpu_fb;
 	int ret;
 
+	if (mode_cmd->pixel_format != DRM_FORMAT_HOST_XRGB8888 &&
+	    mode_cmd->pixel_format != DRM_FORMAT_HOST_ARGB8888)
+		return ERR_PTR(-ENOENT);
+
 	/* lookup object associated with res handle */
 	obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
 	if (!obj)
@@ -355,6 +359,7 @@ int virtio_gpu_modeset_init(struct virtio_gpu_device *vgdev)
 	int i;
 
 	drm_mode_config_init(vgdev->ddev);
+	vgdev->ddev->mode_config.quirk_addfb_prefer_host_byte_order = true;
 	vgdev->ddev->mode_config.funcs = &virtio_gpu_mode_funcs;
 	vgdev->ddev->mode_config.helper_private = &virtio_mode_config_helpers;
 
diff --git a/drivers/gpu/drm/virtio/virtgpu_drv.h b/drivers/gpu/drm/virtio/virtgpu_drv.h
index a2d79e18bda7..d29f0c7c768c 100644
--- a/drivers/gpu/drm/virtio/virtgpu_drv.h
+++ b/drivers/gpu/drm/virtio/virtgpu_drv.h
@@ -270,7 +270,8 @@ void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev,
 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev,
 				   uint32_t resource_id);
 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev,
-					uint32_t resource_id, uint64_t offset,
+					struct virtio_gpu_object *bo,
+					uint64_t offset,
 					__le32 width, __le32 height,
 					__le32 x, __le32 y,
 					struct virtio_gpu_fence **fence);
@@ -316,7 +317,8 @@ void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev,
 					  struct virtio_gpu_box *box,
 					  struct virtio_gpu_fence **fence);
 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev,
-					uint32_t resource_id, uint32_t ctx_id,
+					struct virtio_gpu_object *bo,
+					uint32_t ctx_id,
 					uint64_t offset, uint32_t level,
 					struct virtio_gpu_box *box,
 					struct virtio_gpu_fence **fence);
@@ -361,7 +363,8 @@ void virtio_gpu_fence_event_process(struct virtio_gpu_device *vdev,
 int virtio_gpu_object_create(struct virtio_gpu_device *vgdev,
 			     unsigned long size, bool kernel, bool pinned,
 			     struct virtio_gpu_object **bo_ptr);
-int virtio_gpu_object_kmap(struct virtio_gpu_object *bo, void **ptr);
+void virtio_gpu_object_kunmap(struct virtio_gpu_object *bo);
+int virtio_gpu_object_kmap(struct virtio_gpu_object *bo);
 int virtio_gpu_object_get_sg_table(struct virtio_gpu_device *qdev,
 				   struct virtio_gpu_object *bo);
 void virtio_gpu_object_free_sg_table(struct virtio_gpu_object *bo);
diff --git a/drivers/gpu/drm/virtio/virtgpu_fb.c b/drivers/gpu/drm/virtio/virtgpu_fb.c
index b9678c4082ac..cea749f4ec39 100644
--- a/drivers/gpu/drm/virtio/virtgpu_fb.c
+++ b/drivers/gpu/drm/virtio/virtgpu_fb.c
@@ -95,7 +95,7 @@ static int virtio_gpu_dirty_update(struct virtio_gpu_framebuffer *fb,
 
 	offset = (y * fb->base.pitches[0]) + x * bpp;
 
-	virtio_gpu_cmd_transfer_to_host_2d(vgdev, obj->hw_res_handle,
+	virtio_gpu_cmd_transfer_to_host_2d(vgdev, obj,
 					   offset,
 					   cpu_to_le32(w),
 					   cpu_to_le32(h),
@@ -203,12 +203,6 @@ static struct fb_ops virtio_gpufb_ops = {
 	.fb_imageblit = virtio_gpu_3d_imageblit,
 };
 
-static int virtio_gpu_vmap_fb(struct virtio_gpu_device *vgdev,
-			      struct virtio_gpu_object *obj)
-{
-	return virtio_gpu_object_kmap(obj, NULL);
-}
-
 static int virtio_gpufb_create(struct drm_fb_helper *helper,
 			       struct drm_fb_helper_surface_size *sizes)
 {
@@ -226,7 +220,7 @@ static int virtio_gpufb_create(struct drm_fb_helper *helper,
 	mode_cmd.width = sizes->surface_width;
 	mode_cmd.height = sizes->surface_height;
 	mode_cmd.pitches[0] = mode_cmd.width * 4;
-	mode_cmd.pixel_format = drm_mode_legacy_fb_format(32, 24);
+	mode_cmd.pixel_format = DRM_FORMAT_HOST_XRGB8888;
 
 	format = virtio_gpu_translate_format(mode_cmd.pixel_format);
 	if (format == 0)
@@ -241,9 +235,9 @@ static int virtio_gpufb_create(struct drm_fb_helper *helper,
 	virtio_gpu_cmd_create_resource(vgdev, resid, format,
 				       mode_cmd.width, mode_cmd.height);
 
-	ret = virtio_gpu_vmap_fb(vgdev, obj);
+	ret = virtio_gpu_object_kmap(obj);
 	if (ret) {
-		DRM_ERROR("failed to vmap fb %d\n", ret);
+		DRM_ERROR("failed to kmap fb %d\n", ret);
 		goto err_obj_vmap;
 	}
 
diff --git a/drivers/gpu/drm/virtio/virtgpu_gem.c b/drivers/gpu/drm/virtio/virtgpu_gem.c
index 0f2768eacaee..82c817f37cf7 100644
--- a/drivers/gpu/drm/virtio/virtgpu_gem.c
+++ b/drivers/gpu/drm/virtio/virtgpu_gem.c
@@ -90,7 +90,10 @@ int virtio_gpu_mode_dumb_create(struct drm_file *file_priv,
 	uint32_t resid;
 	uint32_t format;
 
-	pitch = args->width * ((args->bpp + 1) / 8);
+	if (args->bpp != 32)
+		return -EINVAL;
+
+	pitch = args->width * 4;
 	args->size = pitch * args->height;
 	args->size = ALIGN(args->size, PAGE_SIZE);
 
@@ -99,7 +102,7 @@ int virtio_gpu_mode_dumb_create(struct drm_file *file_priv,
 	if (ret)
 		goto fail;
 
-	format = virtio_gpu_translate_format(DRM_FORMAT_XRGB8888);
+	format = virtio_gpu_translate_format(DRM_FORMAT_HOST_XRGB8888);
 	virtio_gpu_resource_id_get(vgdev, &resid);
 	virtio_gpu_cmd_create_resource(vgdev, resid, format,
 				       args->width, args->height);
diff --git a/drivers/gpu/drm/virtio/virtgpu_ioctl.c b/drivers/gpu/drm/virtio/virtgpu_ioctl.c
index 7bdf6f0e58a5..f16b875d6a46 100644
--- a/drivers/gpu/drm/virtio/virtgpu_ioctl.c
+++ b/drivers/gpu/drm/virtio/virtgpu_ioctl.c
@@ -429,11 +429,11 @@ static int virtio_gpu_transfer_to_host_ioctl(struct drm_device *dev, void *data,
 	convert_to_hw_box(&box, &args->box);
 	if (!vgdev->has_virgl_3d) {
 		virtio_gpu_cmd_transfer_to_host_2d
-			(vgdev, qobj->hw_res_handle, offset,
+			(vgdev, qobj, offset,
 			 box.w, box.h, box.x, box.y, NULL);
 	} else {
 		virtio_gpu_cmd_transfer_to_host_3d
-			(vgdev, qobj->hw_res_handle,
+			(vgdev, qobj,
 			 vfpriv ? vfpriv->ctx_id : 0, offset,
 			 args->level, &box, &fence);
 		reservation_object_add_excl_fence(qobj->tbo.resv,
diff --git a/drivers/gpu/drm/virtio/virtgpu_object.c b/drivers/gpu/drm/virtio/virtgpu_object.c
index 9f2f470efd9b..eca765537470 100644
--- a/drivers/gpu/drm/virtio/virtgpu_object.c
+++ b/drivers/gpu/drm/virtio/virtgpu_object.c
@@ -37,6 +37,8 @@ static void virtio_gpu_ttm_bo_destroy(struct ttm_buffer_object *tbo)
 	virtio_gpu_cmd_unref_resource(vgdev, bo->hw_res_handle);
 	if (bo->pages)
 		virtio_gpu_object_free_sg_table(bo);
+	if (bo->vmap)
+		virtio_gpu_object_kunmap(bo);
 	drm_gem_object_release(&bo->gem_base);
 	kfree(bo);
 }
@@ -99,22 +101,23 @@ int virtio_gpu_object_create(struct virtio_gpu_device *vgdev,
 	return 0;
 }
 
-int virtio_gpu_object_kmap(struct virtio_gpu_object *bo, void **ptr)
+void virtio_gpu_object_kunmap(struct virtio_gpu_object *bo)
+{
+	bo->vmap = NULL;
+	ttm_bo_kunmap(&bo->kmap);
+}
+
+int virtio_gpu_object_kmap(struct virtio_gpu_object *bo)
 {
 	bool is_iomem;
 	int r;
 
-	if (bo->vmap) {
-		if (ptr)
-			*ptr = bo->vmap;
-		return 0;
-	}
+	WARN_ON(bo->vmap);
+
 	r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
 	if (r)
 		return r;
 	bo->vmap = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
-	if (ptr)
-		*ptr = bo->vmap;
 	return 0;
 }
 
diff --git a/drivers/gpu/drm/virtio/virtgpu_plane.c b/drivers/gpu/drm/virtio/virtgpu_plane.c
index 88f2fb8c61c4..a9f4ae7d4483 100644
--- a/drivers/gpu/drm/virtio/virtgpu_plane.c
+++ b/drivers/gpu/drm/virtio/virtgpu_plane.c
@@ -28,22 +28,11 @@
 #include <drm/drm_atomic_helper.h>
 
 static const uint32_t virtio_gpu_formats[] = {
-	DRM_FORMAT_XRGB8888,
-	DRM_FORMAT_ARGB8888,
-	DRM_FORMAT_BGRX8888,
-	DRM_FORMAT_BGRA8888,
-	DRM_FORMAT_RGBX8888,
-	DRM_FORMAT_RGBA8888,
-	DRM_FORMAT_XBGR8888,
-	DRM_FORMAT_ABGR8888,
+	DRM_FORMAT_HOST_XRGB8888,
 };
 
 static const uint32_t virtio_gpu_cursor_formats[] = {
-#ifdef __BIG_ENDIAN
-	DRM_FORMAT_BGRA8888,
-#else
-	DRM_FORMAT_ARGB8888,
-#endif
+	DRM_FORMAT_HOST_ARGB8888,
 };
 
 uint32_t virtio_gpu_translate_format(uint32_t drm_fourcc)
@@ -51,32 +40,6 @@ uint32_t virtio_gpu_translate_format(uint32_t drm_fourcc)
 	uint32_t format;
 
 	switch (drm_fourcc) {
-#ifdef __BIG_ENDIAN
-	case DRM_FORMAT_XRGB8888:
-		format = VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM;
-		break;
-	case DRM_FORMAT_ARGB8888:
-		format = VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM;
-		break;
-	case DRM_FORMAT_BGRX8888:
-		format = VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM;
-		break;
-	case DRM_FORMAT_BGRA8888:
-		format = VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM;
-		break;
-	case DRM_FORMAT_RGBX8888:
-		format = VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM;
-		break;
-	case DRM_FORMAT_RGBA8888:
-		format = VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM;
-		break;
-	case DRM_FORMAT_XBGR8888:
-		format = VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM;
-		break;
-	case DRM_FORMAT_ABGR8888:
-		format = VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM;
-		break;
-#else
 	case DRM_FORMAT_XRGB8888:
 		format = VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM;
 		break;
@@ -89,19 +52,6 @@ uint32_t virtio_gpu_translate_format(uint32_t drm_fourcc)
 	case DRM_FORMAT_BGRA8888:
 		format = VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM;
 		break;
-	case DRM_FORMAT_RGBX8888:
-		format = VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM;
-		break;
-	case DRM_FORMAT_RGBA8888:
-		format = VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM;
-		break;
-	case DRM_FORMAT_XBGR8888:
-		format = VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM;
-		break;
-	case DRM_FORMAT_ABGR8888:
-		format = VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM;
-		break;
-#endif
 	default:
 		/*
 		 * This should not happen, we handle everything listed
@@ -158,7 +108,7 @@ static void virtio_gpu_primary_plane_update(struct drm_plane *plane,
 	handle = bo->hw_res_handle;
 	if (bo->dumb) {
 		virtio_gpu_cmd_transfer_to_host_2d
-			(vgdev, handle, 0,
+			(vgdev, bo, 0,
 			 cpu_to_le32(plane->state->src_w >> 16),
 			 cpu_to_le32(plane->state->src_h >> 16),
 			 cpu_to_le32(plane->state->src_x >> 16),
@@ -217,7 +167,7 @@ static void virtio_gpu_cursor_plane_update(struct drm_plane *plane,
 	if (bo && bo->dumb && (plane->state->fb != old_state->fb)) {
 		/* new cursor -- update & wait */
 		virtio_gpu_cmd_transfer_to_host_2d
-			(vgdev, handle, 0,
+			(vgdev, bo, 0,
 			 cpu_to_le32(plane->state->crtc_w),
 			 cpu_to_le32(plane->state->crtc_h),
 			 0, 0, &fence);
diff --git a/drivers/gpu/drm/virtio/virtgpu_prime.c b/drivers/gpu/drm/virtio/virtgpu_prime.c
index d27a1688714f..86ce0ae93f59 100644
--- a/drivers/gpu/drm/virtio/virtgpu_prime.c
+++ b/drivers/gpu/drm/virtio/virtgpu_prime.c
@@ -55,13 +55,18 @@ struct drm_gem_object *virtgpu_gem_prime_import_sg_table(
 
 void *virtgpu_gem_prime_vmap(struct drm_gem_object *obj)
 {
-	WARN_ONCE(1, "not implemented");
-	return ERR_PTR(-ENODEV);
+	struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj);
+	int ret;
+
+	ret = virtio_gpu_object_kmap(bo);
+	if (ret)
+		return NULL;
+	return bo->vmap;
 }
 
 void virtgpu_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr)
 {
-	WARN_ONCE(1, "not implemented");
+	virtio_gpu_object_kunmap(gem_to_virtio_gpu_obj(obj));
 }
 
 int virtgpu_gem_prime_mmap(struct drm_gem_object *obj,
diff --git a/drivers/gpu/drm/virtio/virtgpu_vq.c b/drivers/gpu/drm/virtio/virtgpu_vq.c
index df32811f2c3e..4e2e037aed34 100644
--- a/drivers/gpu/drm/virtio/virtgpu_vq.c
+++ b/drivers/gpu/drm/virtio/virtgpu_vq.c
@@ -483,28 +483,26 @@ void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev,
 }
 
 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev,
-					uint32_t resource_id, uint64_t offset,
+					struct virtio_gpu_object *bo,
+					uint64_t offset,
 					__le32 width, __le32 height,
 					__le32 x, __le32 y,
 					struct virtio_gpu_fence **fence)
 {
 	struct virtio_gpu_transfer_to_host_2d *cmd_p;
 	struct virtio_gpu_vbuffer *vbuf;
-	struct virtio_gpu_fbdev *vgfbdev = vgdev->vgfbdev;
-	struct virtio_gpu_framebuffer *fb = &vgfbdev->vgfb;
-	struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(fb->base.obj[0]);
 	bool use_dma_api = !virtio_has_iommu_quirk(vgdev->vdev);
 
 	if (use_dma_api)
 		dma_sync_sg_for_device(vgdev->vdev->dev.parent,
-				       obj->pages->sgl, obj->pages->nents,
+				       bo->pages->sgl, bo->pages->nents,
 				       DMA_TO_DEVICE);
 
 	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
 	memset(cmd_p, 0, sizeof(*cmd_p));
 
 	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D);
-	cmd_p->resource_id = cpu_to_le32(resource_id);
+	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
 	cmd_p->offset = cpu_to_le64(offset);
 	cmd_p->r.width = width;
 	cmd_p->r.height = height;
@@ -791,21 +789,19 @@ virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev,
 }
 
 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev,
-					uint32_t resource_id, uint32_t ctx_id,
+					struct virtio_gpu_object *bo,
+					uint32_t ctx_id,
 					uint64_t offset, uint32_t level,
 					struct virtio_gpu_box *box,
 					struct virtio_gpu_fence **fence)
 {
 	struct virtio_gpu_transfer_host_3d *cmd_p;
 	struct virtio_gpu_vbuffer *vbuf;
-	struct virtio_gpu_fbdev *vgfbdev = vgdev->vgfbdev;
-	struct virtio_gpu_framebuffer *fb = &vgfbdev->vgfb;
-	struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(fb->base.obj[0]);
 	bool use_dma_api = !virtio_has_iommu_quirk(vgdev->vdev);
 
 	if (use_dma_api)
 		dma_sync_sg_for_device(vgdev->vdev->dev.parent,
-				       obj->pages->sgl, obj->pages->nents,
+				       bo->pages->sgl, bo->pages->nents,
 				       DMA_TO_DEVICE);
 
 	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
@@ -813,7 +809,7 @@ void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev,
 
 	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D);
 	cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
-	cmd_p->resource_id = cpu_to_le32(resource_id);
+	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
 	cmd_p->box = *box;
 	cmd_p->offset = cpu_to_le64(offset);
 	cmd_p->level = cpu_to_le32(level);