Diffstat (limited to 'drivers/gpu/nvgpu/gk20a/fifo_gk20a.c')
-rw-r--r--  drivers/gpu/nvgpu/gk20a/fifo_gk20a.c  19
1 file changed, 10 insertions(+), 9 deletions(-)
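Note: the patch below replaces direct calls to nvgpu_set_error_notifier() with calls through the per-GPU ops table (g->ops.fifo.set_error_notifier), presumably so that a chip- or build-specific backend (e.g. a virtualized one) can override the notifier path. The standalone C sketch here only illustrates that ops-table indirection pattern; the names demo_channel, fifo_ops, and report_native are invented for the example and are not part of nvgpu.

/*
 * Illustrative sketch only: a function-pointer "ops" hook that callers
 * invoke instead of a fixed helper, mirroring the g->ops.fifo indirection
 * introduced by the patch. All identifiers here are hypothetical.
 */
#include <stdio.h>

struct demo_channel {
	int chid;
};

struct fifo_ops {
	/* hook that a given backend installs at init time */
	void (*set_error_notifier)(struct demo_channel *ch, unsigned int err);
};

/* a "native" backend implementation of the hook */
static void report_native(struct demo_channel *ch, unsigned int err)
{
	printf("channel %d: error notifier 0x%x (native path)\n",
	       ch->chid, err);
}

int main(void)
{
	struct fifo_ops ops = { .set_error_notifier = report_native };
	struct demo_channel ch = { .chid = 511 };

	/* callers go through the hook, as the patch does via g->ops.fifo */
	ops.set_error_notifier(&ch, 0x1f);
	return 0;
}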
diff --git a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
index 79aec7a2..576a7f81 100644
--- a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
@@ -1393,7 +1393,7 @@ void gk20a_fifo_set_ctx_mmu_error_ch(struct gk20a *g,
 {
 	nvgpu_err(g,
 		"channel %d generated a mmu fault", refch->chid);
-	nvgpu_set_error_notifier(refch,
+	g->ops.fifo.set_error_notifier(refch,
 		NVGPU_ERR_NOTIFIER_FIFO_ERROR_MMU_ERR_FLT);
 }
 
@@ -1938,7 +1938,8 @@ int gk20a_fifo_force_reset_ch(struct channel_gk20a *ch,
 		nvgpu_list_for_each_entry(ch_tsg, &tsg->ch_list,
 				channel_gk20a, ch_entry) {
 			if (gk20a_channel_get(ch_tsg)) {
-				nvgpu_set_error_notifier(ch_tsg, err_code);
+				g->ops.fifo.set_error_notifier(ch_tsg,
+						err_code);
 				gk20a_channel_put(ch_tsg);
 			}
 		}
@@ -1946,7 +1947,7 @@ int gk20a_fifo_force_reset_ch(struct channel_gk20a *ch,
 		nvgpu_rwsem_up_read(&tsg->ch_list_lock);
 		gk20a_fifo_recover_tsg(g, ch->tsgid, verbose);
 	} else {
-		nvgpu_set_error_notifier(ch, err_code);
+		g->ops.fifo.set_error_notifier(ch, err_code);
 		gk20a_fifo_recover_ch(g, ch->chid, verbose);
 	}
 
@@ -2108,7 +2109,7 @@ bool gk20a_fifo_check_ch_ctxsw_timeout(struct channel_gk20a *ch,
 		*verbose = ch->timeout_debug_dump;
 		*ms = ch->timeout_accumulated_ms;
 		if (recover)
-			nvgpu_set_error_notifier(ch,
+			ch->g->ops.fifo.set_error_notifier(ch,
 				NVGPU_ERR_NOTIFIER_FIFO_ERROR_IDLE_TIMEOUT);
 
 	gk20a_channel_put(ch);
@@ -2172,7 +2173,7 @@ bool gk20a_fifo_check_tsg_ctxsw_timeout(struct tsg_gk20a *tsg,
 		nvgpu_list_for_each_entry(ch, &tsg->ch_list,
 				channel_gk20a, ch_entry) {
 			if (gk20a_channel_get(ch)) {
-				nvgpu_set_error_notifier(ch,
+				ch->g->ops.fifo.set_error_notifier(ch,
 					NVGPU_ERR_NOTIFIER_FIFO_ERROR_IDLE_TIMEOUT);
 				*verbose |= ch->timeout_debug_dump;
 				gk20a_channel_put(ch);
@@ -2487,7 +2488,7 @@ static void gk20a_fifo_pbdma_fault_rc(struct gk20a *g,
 		struct channel_gk20a *ch = &f->channel[id];
 
 		if (gk20a_channel_get(ch)) {
-			nvgpu_set_error_notifier(ch, error_notifier);
+			g->ops.fifo.set_error_notifier(ch, error_notifier);
 			gk20a_fifo_recover_ch(g, id, true);
 			gk20a_channel_put(ch);
 		}
@@ -2500,7 +2501,7 @@ static void gk20a_fifo_pbdma_fault_rc(struct gk20a *g,
 		nvgpu_list_for_each_entry(ch, &tsg->ch_list,
 				channel_gk20a, ch_entry) {
 			if (gk20a_channel_get(ch)) {
-				nvgpu_set_error_notifier(ch,
+				g->ops.fifo.set_error_notifier(ch,
 						error_notifier);
 				gk20a_channel_put(ch);
 			}
@@ -2662,7 +2663,7 @@ void __locked_fifo_preempt_timeout_rc(struct gk20a *g, u32 id,
 				channel_gk20a, ch_entry) {
 			if (!gk20a_channel_get(ch))
 				continue;
-			nvgpu_set_error_notifier(ch,
+			g->ops.fifo.set_error_notifier(ch,
 				NVGPU_ERR_NOTIFIER_FIFO_ERROR_IDLE_TIMEOUT);
 			gk20a_channel_put(ch);
 		}
@@ -2675,7 +2676,7 @@ void __locked_fifo_preempt_timeout_rc(struct gk20a *g, u32 id,
2675 "preempt channel %d timeout", id); 2676 "preempt channel %d timeout", id);
2676 2677
2677 if (gk20a_channel_get(ch)) { 2678 if (gk20a_channel_get(ch)) {
2678 nvgpu_set_error_notifier(ch, 2679 g->ops.fifo.set_error_notifier(ch,
2679 NVGPU_ERR_NOTIFIER_FIFO_ERROR_IDLE_TIMEOUT); 2680 NVGPU_ERR_NOTIFIER_FIFO_ERROR_IDLE_TIMEOUT);
2680 gk20a_fifo_recover_ch(g, id, true); 2681 gk20a_fifo_recover_ch(g, id, true);
2681 gk20a_channel_put(ch); 2682 gk20a_channel_put(ch);