summaryrefslogtreecommitdiffstats
path: root/drivers/gpu/nvgpu/gv11b
diff options
context:
space:
mode:
authorVinod G <vinodg@nvidia.com>2018-08-29 15:32:25 -0400
committermobile promotions <svcmobile_promotions@nvidia.com>2018-08-31 21:57:23 -0400
commitf187e0bf442c3b0a08c46b21196f06a18c8220a0 (patch)
tree48820c076f6ab4a2bad6ab6053d26293c99326c3 /drivers/gpu/nvgpu/gv11b
parentb25d5d86caa049201ddcea77cf1a733a85090698 (diff)
gpu: nvgpu: Move SM_MASK_TYPE setting to TSG level
Moved the SM_MASK_TYPE variable from the GR struct to the TSG struct, as SM error registers are context based. In the dbg_session IOCTL to SET_SM_MASK_TYPE, kernel code iterates to the TSG associated with the first channel and sets the mask_type on that context. Bug 200412641 Change-Id: Ic91944037ad2447f403b4803d5266ae6250ba4c9 Signed-off-by: Vinod G <vinodg@nvidia.com> Reviewed-on: https://git-master.nvidia.com/r/1809322 Reviewed-by: svc-misra-checker <svc-misra-checker@nvidia.com> GVS: Gerrit_Virtual_Submit Reviewed-by: Terje Bergstrom <tbergstrom@nvidia.com> Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com> Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
Diffstat (limited to 'drivers/gpu/nvgpu/gv11b')
-rw-r--r--drivers/gpu/nvgpu/gv11b/gr_gv11b.c37
1 file changed, 19 insertions, 18 deletions
diff --git a/drivers/gpu/nvgpu/gv11b/gr_gv11b.c b/drivers/gpu/nvgpu/gv11b/gr_gv11b.c
index 9a6afa3e..aeb49982 100644
--- a/drivers/gpu/nvgpu/gv11b/gr_gv11b.c
+++ b/drivers/gpu/nvgpu/gv11b/gr_gv11b.c
@@ -2239,7 +2239,7 @@ static int gr_gv11b_handle_warp_esr_error_mmu_nack(struct gk20a *g,
2239static bool gr_gv11b_check_warp_esr_error(struct gk20a *g, u32 warp_esr_error) 2239static bool gr_gv11b_check_warp_esr_error(struct gk20a *g, u32 warp_esr_error)
2240{ 2240{
2241 u32 index = 0U; 2241 u32 index = 0U;
2242 u32 esr_err = gr_gpc0_tpc0_sm0_hww_warp_esr_error_none_f(); 2242 bool esr_err = false;
2243 2243
2244 struct warp_esr_error_table_s { 2244 struct warp_esr_error_table_s {
2245 u32 error_value; 2245 u32 error_value;
@@ -2285,7 +2285,7 @@ static bool gr_gv11b_check_warp_esr_error(struct gk20a *g, u32 warp_esr_error)
2285 2285
2286 for (index = 0; index < ARRAY_SIZE(warp_esr_error_table); index++) { 2286 for (index = 0; index < ARRAY_SIZE(warp_esr_error_table); index++) {
2287 if (warp_esr_error_table[index].error_value == warp_esr_error) { 2287 if (warp_esr_error_table[index].error_value == warp_esr_error) {
2288 esr_err = warp_esr_error_table[index].error_value; 2288 esr_err = true;
2289 nvgpu_log(g, gpu_dbg_fn | gpu_dbg_gpu_dbg, 2289 nvgpu_log(g, gpu_dbg_fn | gpu_dbg_gpu_dbg,
2290 "WARP_ESR %s(0x%x)", 2290 "WARP_ESR %s(0x%x)",
2291 warp_esr_error_table[index].error_name, 2291 warp_esr_error_table[index].error_name,
@@ -2294,8 +2294,9 @@ static bool gr_gv11b_check_warp_esr_error(struct gk20a *g, u32 warp_esr_error)
2294 } 2294 }
2295 } 2295 }
2296 2296
2297 return (esr_err == 0U) ? false : true; 2297 return esr_err;
2298} 2298}
2299
2299static int gr_gv11b_handle_all_warp_esr_errors(struct gk20a *g, 2300static int gr_gv11b_handle_all_warp_esr_errors(struct gk20a *g,
2300 u32 gpc, u32 tpc, u32 sm, 2301 u32 gpc, u32 tpc, u32 sm,
2301 u32 warp_esr_error, 2302 u32 warp_esr_error,
@@ -2316,24 +2317,24 @@ static int gr_gv11b_handle_all_warp_esr_errors(struct gk20a *g,
2316 return 0; 2317 return 0;
2317 } 2318 }
2318 2319
2319 /*
2320 * Check SET_EXCEPTION_TYPE_MASK is being set.
2321 * If set, skip the recovery and trigger CILP
2322 * If not set, trigger the recovery.
2323 */
2324 if ((g->gr.sm_exception_mask_type &
2325 NVGPU_SM_EXCEPTION_TYPE_MASK_FATAL) ==
2326 NVGPU_SM_EXCEPTION_TYPE_MASK_FATAL) {
2327 nvgpu_log(g, gpu_dbg_fn | gpu_dbg_gpu_dbg,
2328 "SM Exception Type Mask set %d,"
2329 "skip recovery",
2330 g->gr.sm_exception_mask_type);
2331 return 0;
2332 }
2333
2334 if (fault_ch) { 2320 if (fault_ch) {
2335 tsg = &g->fifo.tsg[fault_ch->tsgid]; 2321 tsg = &g->fifo.tsg[fault_ch->tsgid];
2336 2322
2323 /*
2324 * Check SET_EXCEPTION_TYPE_MASK is being set.
2325 * If set, skip the recovery and trigger CILP
2326 * If not set, trigger the recovery.
2327 */
2328 if ((tsg->sm_exception_mask_type &
2329 NVGPU_SM_EXCEPTION_TYPE_MASK_FATAL) ==
2330 NVGPU_SM_EXCEPTION_TYPE_MASK_FATAL) {
2331 nvgpu_log(g, gpu_dbg_fn | gpu_dbg_gpu_dbg,
2332 "SM Exception Type Mask set %d,"
2333 "skip recovery",
2334 tsg->sm_exception_mask_type);
2335 return 0;
2336 }
2337
2337 nvgpu_rwsem_down_read(&tsg->ch_list_lock); 2338 nvgpu_rwsem_down_read(&tsg->ch_list_lock);
2338 nvgpu_list_for_each_entry(ch_tsg, &tsg->ch_list, 2339 nvgpu_list_for_each_entry(ch_tsg, &tsg->ch_list,
2339 channel_gk20a, ch_entry) { 2340 channel_gk20a, ch_entry) {