summaryrefslogtreecommitdiffstats
path: root/drivers/gpu/nvgpu/gv11b/subctx_gv11b.c
diff options
context:
space:
mode:
authorTerje Bergstrom <tbergstrom@nvidia.com>2018-07-06 11:50:36 -0400
committermobile promotions <svcmobile_promotions@nvidia.com>2018-07-09 17:40:50 -0400
commit0ddd219697155bcb64aaa04544108519686e16cc (patch)
treee33da070ae0a486bca5e0510b2c5a24915d16187 /drivers/gpu/nvgpu/gv11b/subctx_gv11b.c
parentbbebc611bc10a824d5d51fc2ea9d0408e350d26a (diff)
gpu: nvgpu: Conditional enable for replayable fault
Enable replayable fault only for contexts where they are requested. This
required moving the code to initialize subcontexts to happen later.

Fix signedness issues in definition of flags.

JIRA NVGPU-714

Change-Id: I472004e13b1ea46c1bd202f9b12d2ce221b756f9
Signed-off-by: Terje Bergstrom <tbergstrom@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/1773262
Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com>
Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
Diffstat (limited to 'drivers/gpu/nvgpu/gv11b/subctx_gv11b.c')
-rw-r--r--drivers/gpu/nvgpu/gv11b/subctx_gv11b.c22
1 files changed, 15 insertions, 7 deletions
diff --git a/drivers/gpu/nvgpu/gv11b/subctx_gv11b.c b/drivers/gpu/nvgpu/gv11b/subctx_gv11b.c
index b0bcb585..8f12bbe5 100644
--- a/drivers/gpu/nvgpu/gv11b/subctx_gv11b.c
+++ b/drivers/gpu/nvgpu/gv11b/subctx_gv11b.c
@@ -37,7 +37,8 @@
 static void gv11b_subctx_commit_valid_mask(struct vm_gk20a *vm,
 		struct nvgpu_mem *inst_block);
 static void gv11b_subctx_commit_pdb(struct vm_gk20a *vm,
-		struct nvgpu_mem *inst_block);
+		struct nvgpu_mem *inst_block,
+		bool replayable);
 
 void gv11b_free_subctx_header(struct channel_gk20a *c)
 {
@@ -84,9 +85,10 @@ int gv11b_alloc_subctx_header(struct channel_gk20a *c)
 }
 
 void gv11b_init_subcontext_pdb(struct vm_gk20a *vm,
-		struct nvgpu_mem *inst_block)
+		struct nvgpu_mem *inst_block,
+		bool replayable)
 {
-	gv11b_subctx_commit_pdb(vm, inst_block);
+	gv11b_subctx_commit_pdb(vm, inst_block, replayable);
 	gv11b_subctx_commit_valid_mask(vm, inst_block);
 
 }
@@ -157,8 +159,9 @@ void gv11b_subctx_commit_valid_mask(struct vm_gk20a *vm,
 	nvgpu_mem_wr32(g, inst_block, 167, 0xffffffff);
 }
 
-void gv11b_subctx_commit_pdb(struct vm_gk20a *vm,
-		struct nvgpu_mem *inst_block)
+static void gv11b_subctx_commit_pdb(struct vm_gk20a *vm,
+		struct nvgpu_mem *inst_block,
+		bool replayable)
 {
 	struct gk20a *g = gk20a_from_vm(vm);
 	u32 lo, hi;
@@ -179,11 +182,16 @@ void gv11b_subctx_commit_pdb(struct vm_gk20a *vm,
 			aperture, 0) |
 		ram_in_sc_page_dir_base_vol_f(
 			ram_in_sc_page_dir_base_vol_true_v(), 0) |
-		ram_in_sc_page_dir_base_fault_replay_tex_f(1, 0) |
-		ram_in_sc_page_dir_base_fault_replay_gcc_f(1, 0) |
 		ram_in_sc_use_ver2_pt_format_f(1, 0) |
 		ram_in_sc_big_page_size_f(1, 0) |
 		ram_in_sc_page_dir_base_lo_0_f(pdb_addr_lo);
+
+	if (replayable) {
+		format_word |=
+			ram_in_sc_page_dir_base_fault_replay_tex_f(1, 0) |
+			ram_in_sc_page_dir_base_fault_replay_gcc_f(1, 0);
+	}
+
 	nvgpu_log(g, gpu_dbg_info, " pdb info lo %x hi %x",
 		format_word, pdb_addr_hi);
 	for (subctx_id = 0; subctx_id < max_subctx_count; subctx_id++) {