summaryrefslogtreecommitdiffstats
path: root/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/gpu/nvgpu/gk20a/gr_gk20a.c')
-rw-r--r--	drivers/gpu/nvgpu/gk20a/gr_gk20a.c	32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/drivers/gpu/nvgpu/gk20a/gr_gk20a.c b/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
index 82695e44..270d36d6 100644
--- a/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
@@ -2613,13 +2613,13 @@ static int gr_gk20a_map_global_ctx_buffers(struct gk20a *g,
 	gk20a_dbg_fn("");
 
 	/* Circular Buffer */
-	if (!c->vpr ||
-	    (gr->global_ctx_buffer[CIRCULAR_VPR].mem.priv.sgt == NULL)) {
-		mem = &gr->global_ctx_buffer[CIRCULAR].mem;
-		g_bfr_index[CIRCULAR_VA] = CIRCULAR;
-	} else {
-		mem = &gr->global_ctx_buffer[CIRCULAR_VPR].mem;
-		g_bfr_index[CIRCULAR_VA] = CIRCULAR_VPR;
+	if (c->vpr &&
+	    nvgpu_mem_is_valid(&gr->global_ctx_buffer[CIRCULAR_VPR].mem)) {
+		mem = &gr->global_ctx_buffer[CIRCULAR_VPR].mem;
+		g_bfr_index[CIRCULAR_VA] = CIRCULAR_VPR;
+	} else {
+		mem = &gr->global_ctx_buffer[CIRCULAR].mem;
+		g_bfr_index[CIRCULAR_VA] = CIRCULAR;
 	}
 
 	gpu_va = nvgpu_gmmu_map(ch_vm, mem, mem->size,
@@ -2631,13 +2631,13 @@ static int gr_gk20a_map_global_ctx_buffers(struct gk20a *g,
 	g_bfr_size[CIRCULAR_VA] = mem->size;
 
 	/* Attribute Buffer */
-	if (!c->vpr ||
-	    (gr->global_ctx_buffer[ATTRIBUTE_VPR].mem.priv.sgt == NULL)) {
-		mem = &gr->global_ctx_buffer[ATTRIBUTE].mem;
-		g_bfr_index[ATTRIBUTE_VA] = ATTRIBUTE;
-	} else {
-		mem = &gr->global_ctx_buffer[ATTRIBUTE_VPR].mem;
-		g_bfr_index[ATTRIBUTE_VA] = ATTRIBUTE_VPR;
+	if (c->vpr &&
+	    nvgpu_mem_is_valid(&gr->global_ctx_buffer[ATTRIBUTE_VPR].mem)) {
+		mem = &gr->global_ctx_buffer[ATTRIBUTE_VPR].mem;
+		g_bfr_index[ATTRIBUTE_VA] = ATTRIBUTE_VPR;
+	} else {
+		mem = &gr->global_ctx_buffer[ATTRIBUTE].mem;
+		g_bfr_index[ATTRIBUTE_VA] = ATTRIBUTE;
 	}
 
 	gpu_va = nvgpu_gmmu_map(ch_vm, mem, mem->size,
@@ -2649,13 +2649,13 @@ static int gr_gk20a_map_global_ctx_buffers(struct gk20a *g,
 	g_bfr_size[ATTRIBUTE_VA] = mem->size;
 
 	/* Page Pool */
-	if (!c->vpr ||
-	    (gr->global_ctx_buffer[PAGEPOOL_VPR].mem.priv.sgt == NULL)) {
-		mem = &gr->global_ctx_buffer[PAGEPOOL].mem;
-		g_bfr_index[PAGEPOOL_VA] = PAGEPOOL;
-	} else {
-		mem = &gr->global_ctx_buffer[PAGEPOOL_VPR].mem;
-		g_bfr_index[PAGEPOOL_VA] = PAGEPOOL_VPR;
+	if (c->vpr &&
+	    nvgpu_mem_is_valid(&gr->global_ctx_buffer[PAGEPOOL_VPR].mem)) {
+		mem = &gr->global_ctx_buffer[PAGEPOOL_VPR].mem;
+		g_bfr_index[PAGEPOOL_VA] = PAGEPOOL_VPR;
+	} else {
+		mem = &gr->global_ctx_buffer[PAGEPOOL].mem;
+		g_bfr_index[PAGEPOOL_VA] = PAGEPOOL;
 	}
 
 	gpu_va = nvgpu_gmmu_map(ch_vm, mem, mem->size,
@@ -2960,7 +2960,7 @@ int gk20a_alloc_obj_ctx(struct channel_gk20a *c, u32 class_num, u32 flags)
 	}
 
 	/* allocate patch buffer */
-	if (ch_ctx->patch_ctx.mem.priv.sgt == NULL) {
+	if (!nvgpu_mem_is_valid(&ch_ctx->patch_ctx.mem)) {
 		ch_ctx->patch_ctx.data_count = 0;
 		err = gr_gk20a_alloc_channel_patch_ctx(g, c);
 		if (err) {