Diffstat (limited to 'drivers/gpu/drm/etnaviv/etnaviv_gem.c')
-rw-r--r--  drivers/gpu/drm/etnaviv/etnaviv_gem.c  36
1 file changed, 26 insertions(+), 10 deletions(-)
diff --git a/drivers/gpu/drm/etnaviv/etnaviv_gem.c b/drivers/gpu/drm/etnaviv/etnaviv_gem.c
index 9f77c3b94cc6..4b519e4309b2 100644
--- a/drivers/gpu/drm/etnaviv/etnaviv_gem.c
+++ b/drivers/gpu/drm/etnaviv/etnaviv_gem.c
@@ -353,25 +353,39 @@ void etnaviv_gem_put_iova(struct etnaviv_gpu *gpu, struct drm_gem_object *obj)
         drm_gem_object_unreference_unlocked(obj);
 }
 
-void *etnaviv_gem_vaddr(struct drm_gem_object *obj)
+void *etnaviv_gem_vmap(struct drm_gem_object *obj)
 {
         struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
 
-        mutex_lock(&etnaviv_obj->lock);
-        if (!etnaviv_obj->vaddr) {
-                struct page **pages = etnaviv_gem_get_pages(etnaviv_obj);
-
-                if (IS_ERR(pages))
-                        return ERR_CAST(pages);
+        if (etnaviv_obj->vaddr)
+                return etnaviv_obj->vaddr;
 
-                etnaviv_obj->vaddr = vmap(pages, obj->size >> PAGE_SHIFT,
-                                VM_MAP, pgprot_writecombine(PAGE_KERNEL));
-        }
+        mutex_lock(&etnaviv_obj->lock);
+        /*
+         * Need to check again, as we might have raced with another thread
+         * while waiting for the mutex.
+         */
+        if (!etnaviv_obj->vaddr)
+                etnaviv_obj->vaddr = etnaviv_obj->ops->vmap(etnaviv_obj);
         mutex_unlock(&etnaviv_obj->lock);
 
         return etnaviv_obj->vaddr;
 }
 
+static void *etnaviv_gem_vmap_impl(struct etnaviv_gem_object *obj)
+{
+        struct page **pages;
+
+        lockdep_assert_held(&obj->lock);
+
+        pages = etnaviv_gem_get_pages(obj);
+        if (IS_ERR(pages))
+                return NULL;
+
+        return vmap(pages, obj->base.size >> PAGE_SHIFT,
+                        VM_MAP, pgprot_writecombine(PAGE_KERNEL));
+}
+
 static inline enum dma_data_direction etnaviv_op_to_dma_dir(u32 op)
 {
         if (op & ETNA_PREP_READ)
@@ -522,6 +536,7 @@ static void etnaviv_gem_shmem_release(struct etnaviv_gem_object *etnaviv_obj)
 static const struct etnaviv_gem_ops etnaviv_gem_shmem_ops = {
         .get_pages = etnaviv_gem_shmem_get_pages,
         .release = etnaviv_gem_shmem_release,
+        .vmap = etnaviv_gem_vmap_impl,
 };
 
 void etnaviv_gem_free_object(struct drm_gem_object *obj)
@@ -866,6 +881,7 @@ static void etnaviv_gem_userptr_release(struct etnaviv_gem_object *etnaviv_obj)
 static const struct etnaviv_gem_ops etnaviv_gem_userptr_ops = {
         .get_pages = etnaviv_gem_userptr_get_pages,
         .release = etnaviv_gem_userptr_release,
+        .vmap = etnaviv_gem_vmap_impl,
 };
 
 int etnaviv_gem_new_userptr(struct drm_device *dev, struct drm_file *file,
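
Note on the pattern: the patch turns etnaviv_gem_vmap() into a double-checked lazy initialisation. A lockless fast path returns an already cached mapping, and the mutex-protected slow path re-checks the pointer before delegating the actual mapping to the per-object ops->vmap() callback (etnaviv_gem_vmap_impl for the shmem and userptr object types). The standalone sketch below illustrates that pattern outside the driver; my_obj, my_obj_ops and my_obj_vmap are made-up names used for illustration only, and the sketch mirrors the driver's assumption that reading a non-NULL vaddr without the lock is acceptable on the fast path.

/*
 * Minimal sketch of the lazy-vmap pattern above. Hypothetical types and
 * names; not part of the etnaviv driver.
 */
#include <linux/mutex.h>

struct my_obj;

struct my_obj_ops {
        /* Called with obj->lock held; returns a kernel mapping or NULL. */
        void *(*vmap)(struct my_obj *obj);
};

struct my_obj {
        struct mutex lock;
        void *vaddr;                    /* cached mapping, NULL until first use */
        const struct my_obj_ops *ops;
};

static void *my_obj_vmap(struct my_obj *obj)
{
        /* Fast path: mapping already exists, return it without locking. */
        if (obj->vaddr)
                return obj->vaddr;

        mutex_lock(&obj->lock);
        /*
         * Re-check under the lock: another thread may have created the
         * mapping while we were waiting for the mutex.
         */
        if (!obj->vaddr)
                obj->vaddr = obj->ops->vmap(obj);
        mutex_unlock(&obj->lock);

        return obj->vaddr;
}

Routing the mapping through an ops callback rather than calling vmap() directly presumably leaves room for object types that need a different mapping strategy (for example imported dma-bufs) to supply their own implementation.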
