summaryrefslogtreecommitdiffstats
path: root/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/gpu/nvgpu/gk20a/gr_gk20a.c')
-rw-r--r--  drivers/gpu/nvgpu/gk20a/gr_gk20a.c  40
1 file changed, 20 insertions(+), 20 deletions(-)
diff --git a/drivers/gpu/nvgpu/gk20a/gr_gk20a.c b/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
index 360b8c97..971e2320 100644
--- a/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/gr_gk20a.c
@@ -699,7 +699,7 @@ void gr_gk20a_ctx_patch_write(struct gk20a *g,
699 } 699 }
700} 700}
701 701
702static u32 fecs_current_ctx_data(struct gk20a *g, struct mem_desc *inst_block) 702static u32 fecs_current_ctx_data(struct gk20a *g, struct nvgpu_mem *inst_block)
703{ 703{
704 u32 ptr = u64_lo32(gk20a_mm_inst_block_addr(g, inst_block) 704 u32 ptr = u64_lo32(gk20a_mm_inst_block_addr(g, inst_block)
705 >> ram_in_base_shift_v()); 705 >> ram_in_base_shift_v());
@@ -741,7 +741,7 @@ static int gr_gk20a_fecs_ctx_bind_channel(struct gk20a *g,
741} 741}
742 742
743void gr_gk20a_write_zcull_ptr(struct gk20a *g, 743void gr_gk20a_write_zcull_ptr(struct gk20a *g,
744 struct mem_desc *mem, u64 gpu_va) 744 struct nvgpu_mem *mem, u64 gpu_va)
745{ 745{
746 u32 va = u64_lo32(gpu_va >> 8); 746 u32 va = u64_lo32(gpu_va >> 8);
747 747
@@ -750,7 +750,7 @@ void gr_gk20a_write_zcull_ptr(struct gk20a *g,
750} 750}
751 751
752void gr_gk20a_write_pm_ptr(struct gk20a *g, 752void gr_gk20a_write_pm_ptr(struct gk20a *g,
753 struct mem_desc *mem, u64 gpu_va) 753 struct nvgpu_mem *mem, u64 gpu_va)
754{ 754{
755 u32 va = u64_lo32(gpu_va >> 8); 755 u32 va = u64_lo32(gpu_va >> 8);
756 756
@@ -761,9 +761,9 @@ void gr_gk20a_write_pm_ptr(struct gk20a *g,
761static int gr_gk20a_ctx_zcull_setup(struct gk20a *g, struct channel_gk20a *c) 761static int gr_gk20a_ctx_zcull_setup(struct gk20a *g, struct channel_gk20a *c)
762{ 762{
763 struct channel_ctx_gk20a *ch_ctx = &c->ch_ctx; 763 struct channel_ctx_gk20a *ch_ctx = &c->ch_ctx;
764 struct mem_desc *mem = &ch_ctx->gr_ctx->mem; 764 struct nvgpu_mem *mem = &ch_ctx->gr_ctx->mem;
765 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header; 765 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header;
766 struct mem_desc *ctxheader = &ctx->mem; 766 struct nvgpu_mem *ctxheader = &ctx->mem;
767 int ret = 0; 767 int ret = 0;
768 768
769 gk20a_dbg_fn(""); 769 gk20a_dbg_fn("");
@@ -1579,15 +1579,15 @@ static int gr_gk20a_init_golden_ctx_image(struct gk20a *g,
1579 u32 ctx_header_words; 1579 u32 ctx_header_words;
1580 u32 i; 1580 u32 i;
1581 u32 data; 1581 u32 data;
1582 struct mem_desc *gold_mem = &gr->global_ctx_buffer[GOLDEN_CTX].mem; 1582 struct nvgpu_mem *gold_mem = &gr->global_ctx_buffer[GOLDEN_CTX].mem;
1583 struct mem_desc *gr_mem = &ch_ctx->gr_ctx->mem; 1583 struct nvgpu_mem *gr_mem = &ch_ctx->gr_ctx->mem;
1584 u32 err = 0; 1584 u32 err = 0;
1585 struct aiv_list_gk20a *sw_ctx_load = &g->gr.ctx_vars.sw_ctx_load; 1585 struct aiv_list_gk20a *sw_ctx_load = &g->gr.ctx_vars.sw_ctx_load;
1586 struct av_list_gk20a *sw_method_init = &g->gr.ctx_vars.sw_method_init; 1586 struct av_list_gk20a *sw_method_init = &g->gr.ctx_vars.sw_method_init;
1587 u32 last_method_data = 0; 1587 u32 last_method_data = 0;
1588 struct gk20a_platform *platform = dev_get_drvdata(g->dev); 1588 struct gk20a_platform *platform = dev_get_drvdata(g->dev);
1589 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header; 1589 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header;
1590 struct mem_desc *ctxheader = &ctx->mem; 1590 struct nvgpu_mem *ctxheader = &ctx->mem;
1591 1591
1592 gk20a_dbg_fn(""); 1592 gk20a_dbg_fn("");
1593 1593
@@ -1836,7 +1836,7 @@ int gr_gk20a_update_smpc_ctxsw_mode(struct gk20a *g,
1836 bool enable_smpc_ctxsw) 1836 bool enable_smpc_ctxsw)
1837{ 1837{
1838 struct channel_ctx_gk20a *ch_ctx = &c->ch_ctx; 1838 struct channel_ctx_gk20a *ch_ctx = &c->ch_ctx;
1839 struct mem_desc *mem; 1839 struct nvgpu_mem *mem;
1840 u32 data; 1840 u32 data;
1841 int ret; 1841 int ret;
1842 1842
@@ -1893,11 +1893,11 @@ int gr_gk20a_update_hwpm_ctxsw_mode(struct gk20a *g,
1893{ 1893{
1894 struct channel_ctx_gk20a *ch_ctx = &c->ch_ctx; 1894 struct channel_ctx_gk20a *ch_ctx = &c->ch_ctx;
1895 struct pm_ctx_desc *pm_ctx = &ch_ctx->pm_ctx; 1895 struct pm_ctx_desc *pm_ctx = &ch_ctx->pm_ctx;
1896 struct mem_desc *gr_mem; 1896 struct nvgpu_mem *gr_mem;
1897 u32 data; 1897 u32 data;
1898 u64 virt_addr; 1898 u64 virt_addr;
1899 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header; 1899 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header;
1900 struct mem_desc *ctxheader = &ctx->mem; 1900 struct nvgpu_mem *ctxheader = &ctx->mem;
1901 int ret; 1901 int ret;
1902 1902
1903 gk20a_dbg_fn(""); 1903 gk20a_dbg_fn("");
@@ -2018,7 +2018,7 @@ cleanup_pm_buf:
2018 gk20a_gmmu_unmap(c->vm, pm_ctx->mem.gpu_va, pm_ctx->mem.size, 2018 gk20a_gmmu_unmap(c->vm, pm_ctx->mem.gpu_va, pm_ctx->mem.size,
2019 gk20a_mem_flag_none); 2019 gk20a_mem_flag_none);
2020 gk20a_gmmu_free(g, &pm_ctx->mem); 2020 gk20a_gmmu_free(g, &pm_ctx->mem);
2021 memset(&pm_ctx->mem, 0, sizeof(struct mem_desc)); 2021 memset(&pm_ctx->mem, 0, sizeof(struct nvgpu_mem));
2022 2022
2023 gk20a_enable_channel_tsg(g, c); 2023 gk20a_enable_channel_tsg(g, c);
2024 return ret; 2024 return ret;
@@ -2035,9 +2035,9 @@ int gr_gk20a_load_golden_ctx_image(struct gk20a *g,
2035 u64 virt_addr = 0; 2035 u64 virt_addr = 0;
2036 u32 v, data; 2036 u32 v, data;
2037 int ret = 0; 2037 int ret = 0;
2038 struct mem_desc *mem = &ch_ctx->gr_ctx->mem; 2038 struct nvgpu_mem *mem = &ch_ctx->gr_ctx->mem;
2039 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header; 2039 struct ctx_header_desc *ctx = &c->ch_ctx.ctx_header;
2040 struct mem_desc *ctxheader = &ctx->mem; 2040 struct nvgpu_mem *ctxheader = &ctx->mem;
2041 2041
2042 gk20a_dbg_fn(""); 2042 gk20a_dbg_fn("");
2043 2043
@@ -2249,7 +2249,7 @@ static void gr_gk20a_init_ctxsw_ucode_segments(
2249 2249
2250static int gr_gk20a_copy_ctxsw_ucode_segments( 2250static int gr_gk20a_copy_ctxsw_ucode_segments(
2251 struct gk20a *g, 2251 struct gk20a *g,
2252 struct mem_desc *dst, 2252 struct nvgpu_mem *dst,
2253 struct gk20a_ctxsw_ucode_segments *segments, 2253 struct gk20a_ctxsw_ucode_segments *segments,
2254 u32 *bootimage, 2254 u32 *bootimage,
2255 u32 *code, u32 *data) 2255 u32 *code, u32 *data)
@@ -2826,7 +2826,7 @@ static int gr_gk20a_map_global_ctx_buffers(struct gk20a *g,
2826 u64 *g_bfr_va = c->ch_ctx.global_ctx_buffer_va; 2826 u64 *g_bfr_va = c->ch_ctx.global_ctx_buffer_va;
2827 u64 *g_bfr_size = c->ch_ctx.global_ctx_buffer_size; 2827 u64 *g_bfr_size = c->ch_ctx.global_ctx_buffer_size;
2828 struct gr_gk20a *gr = &g->gr; 2828 struct gr_gk20a *gr = &g->gr;
2829 struct mem_desc *mem; 2829 struct nvgpu_mem *mem;
2830 u64 gpu_va; 2830 u64 gpu_va;
2831 u32 i; 2831 u32 i;
2832 gk20a_dbg_fn(""); 2832 gk20a_dbg_fn("");
@@ -5085,7 +5085,7 @@ out:
5085static int gr_gk20a_init_access_map(struct gk20a *g) 5085static int gr_gk20a_init_access_map(struct gk20a *g)
5086{ 5086{
5087 struct gr_gk20a *gr = &g->gr; 5087 struct gr_gk20a *gr = &g->gr;
5088 struct mem_desc *mem = &gr->global_ctx_buffer[PRIV_ACCESS_MAP].mem; 5088 struct nvgpu_mem *mem = &gr->global_ctx_buffer[PRIV_ACCESS_MAP].mem;
5089 u32 w, nr_pages = 5089 u32 w, nr_pages =
5090 DIV_ROUND_UP(gr->ctx_vars.priv_access_map_size, 5090 DIV_ROUND_UP(gr->ctx_vars.priv_access_map_size,
5091 PAGE_SIZE); 5091 PAGE_SIZE);
@@ -6645,7 +6645,7 @@ int gr_gk20a_fecs_get_reglist_img_size(struct gk20a *g, u32 *size)
6645} 6645}
6646 6646
6647int gr_gk20a_fecs_set_reglist_bind_inst(struct gk20a *g, 6647int gr_gk20a_fecs_set_reglist_bind_inst(struct gk20a *g,
6648 struct mem_desc *inst_block) 6648 struct nvgpu_mem *inst_block)
6649{ 6649{
6650 u32 data = fecs_current_ctx_data(g, inst_block); 6650 u32 data = fecs_current_ctx_data(g, inst_block);
6651 6651
@@ -7131,7 +7131,7 @@ static void gr_gk20a_init_sm_dsm_reg_info(void)
7131static int gr_gk20a_ctx_patch_smpc(struct gk20a *g, 7131static int gr_gk20a_ctx_patch_smpc(struct gk20a *g,
7132 struct channel_ctx_gk20a *ch_ctx, 7132 struct channel_ctx_gk20a *ch_ctx,
7133 u32 addr, u32 data, 7133 u32 addr, u32 data,
7134 struct mem_desc *mem) 7134 struct nvgpu_mem *mem)
7135{ 7135{
7136 u32 num_gpc = g->gr.gpc_count; 7136 u32 num_gpc = g->gr.gpc_count;
7137 u32 num_tpc; 7137 u32 num_tpc;
@@ -8258,7 +8258,7 @@ int gr_gk20a_exec_ctx_ops(struct channel_gk20a *ch,
8258 struct channel_ctx_gk20a *ch_ctx = &ch->ch_ctx; 8258 struct channel_ctx_gk20a *ch_ctx = &ch->ch_ctx;
8259 bool gr_ctx_ready = false; 8259 bool gr_ctx_ready = false;
8260 bool pm_ctx_ready = false; 8260 bool pm_ctx_ready = false;
8261 struct mem_desc *current_mem = NULL; 8261 struct nvgpu_mem *current_mem = NULL;
8262 bool ch_is_curr_ctx, restart_gr_ctxsw = false; 8262 bool ch_is_curr_ctx, restart_gr_ctxsw = false;
8263 u32 i, j, offset, v; 8263 u32 i, j, offset, v;
8264 struct gr_gk20a *gr = &g->gr; 8264 struct gr_gk20a *gr = &g->gr;