summaryrefslogtreecommitdiffstats
path: root/drivers/gpu/nvgpu/gk20a
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/gpu/nvgpu/gk20a')
-rw-r--r--drivers/gpu/nvgpu/gk20a/fifo_gk20a.c39
-rw-r--r--drivers/gpu/nvgpu/gk20a/fifo_gk20a.h2
2 files changed, 25 insertions, 16 deletions
diff --git a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
index a1844a28..d6f1cb3a 100644
--- a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
@@ -1980,6 +1980,27 @@ void gk20a_fifo_recover_tsg(struct gk20a *g, struct tsg_gk20a *tsg,
1980 nvgpu_mutex_release(&g->dbg_sessions_lock); 1980 nvgpu_mutex_release(&g->dbg_sessions_lock);
1981} 1981}
1982 1982
/*
 * Mask off the FIFO interrupts that would interfere with teardown/recovery.
 *
 * sched error prevents recovery, and ctxsw error will retrigger
 * every 100ms. Disable the sched error (and the MMU fault interrupt)
 * to allow recovery, and clear any pending sched error so it does not
 * fire again the moment interrupts are re-enabled.
 */
void gk20a_fifo_teardown_mask_intr(struct gk20a *g)
{
	u32 val;

	/* Read-modify-write: clear only the sched-error and MMU-fault
	 * enable bits, leaving the other interrupt enables untouched. */
	val = gk20a_readl(g, fifo_intr_en_0_r());
	val &= ~(fifo_intr_en_0_sched_error_m() |
		fifo_intr_en_0_mmu_fault_m());
	gk20a_writel(g, fifo_intr_en_0_r(), val);
	/* Acknowledge (reset) a pending sched-error interrupt. */
	gk20a_writel(g, fifo_intr_0_r(), fifo_intr_0_sched_error_reset_f());
}
1993
1994void gk20a_fifo_teardown_unmask_intr(struct gk20a *g)
1995{
1996 u32 val;
1997
1998 val = gk20a_readl(g, fifo_intr_en_0_r());
1999 val |= fifo_intr_en_0_mmu_fault_f(1) | fifo_intr_en_0_sched_error_f(1);
2000 gk20a_writel(g, fifo_intr_en_0_r(), val);
2001
2002}
2003
1983void gk20a_fifo_teardown_ch_tsg(struct gk20a *g, u32 __engine_ids, 2004void gk20a_fifo_teardown_ch_tsg(struct gk20a *g, u32 __engine_ids,
1984 u32 hw_id, unsigned int id_type, unsigned int rc_type, 2005 u32 hw_id, unsigned int id_type, unsigned int rc_type,
1985 struct mmu_fault_info *mmfault) 2006 struct mmu_fault_info *mmfault)
@@ -1987,7 +2008,6 @@ void gk20a_fifo_teardown_ch_tsg(struct gk20a *g, u32 __engine_ids,
1987 unsigned long engine_id, i; 2008 unsigned long engine_id, i;
1988 unsigned long _engine_ids = __engine_ids; 2009 unsigned long _engine_ids = __engine_ids;
1989 unsigned long engine_ids = 0; 2010 unsigned long engine_ids = 0;
1990 u32 val;
1991 u32 mmu_fault_engines = 0; 2011 u32 mmu_fault_engines = 0;
1992 u32 ref_type; 2012 u32 ref_type;
1993 u32 ref_id; 2013 u32 ref_id;
@@ -2048,25 +2068,12 @@ void gk20a_fifo_teardown_ch_tsg(struct gk20a *g, u32 __engine_ids,
2048 } 2068 }
2049 2069
2050 if (mmu_fault_engines) { 2070 if (mmu_fault_engines) {
2051 /* 2071 g->ops.fifo.teardown_mask_intr(g);
2052 * sched error prevents recovery, and ctxsw error will retrigger
2053 * every 100ms. Disable the sched error to allow recovery.
2054 */
2055 val = gk20a_readl(g, fifo_intr_en_0_r());
2056 val &= ~(fifo_intr_en_0_sched_error_m() |
2057 fifo_intr_en_0_mmu_fault_m());
2058 gk20a_writel(g, fifo_intr_en_0_r(), val);
2059 gk20a_writel(g, fifo_intr_0_r(),
2060 fifo_intr_0_sched_error_reset_f());
2061
2062 g->ops.fifo.trigger_mmu_fault(g, engine_ids); 2072 g->ops.fifo.trigger_mmu_fault(g, engine_ids);
2063 gk20a_fifo_handle_mmu_fault_locked(g, mmu_fault_engines, ref_id, 2073 gk20a_fifo_handle_mmu_fault_locked(g, mmu_fault_engines, ref_id,
2064 ref_id_is_tsg); 2074 ref_id_is_tsg);
2065 2075
2066 val = gk20a_readl(g, fifo_intr_en_0_r()); 2076 g->ops.fifo.teardown_unmask_intr(g);
2067 val |= fifo_intr_en_0_mmu_fault_f(1)
2068 | fifo_intr_en_0_sched_error_f(1);
2069 gk20a_writel(g, fifo_intr_en_0_r(), val);
2070 } 2077 }
2071 2078
2072 nvgpu_log_info(g, "release runlist_lock for all runlists"); 2079 nvgpu_log_info(g, "release runlist_lock for all runlists");
diff --git a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.h b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.h
index 29c2f889..0c9d9101 100644
--- a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.h
+++ b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.h
@@ -426,6 +426,8 @@ bool gk20a_fifo_check_ch_ctxsw_timeout(struct channel_gk20a *ch,
426 bool *verbose, u32 *ms); 426 bool *verbose, u32 *ms);
427bool gk20a_fifo_check_tsg_ctxsw_timeout(struct tsg_gk20a *tsg, 427bool gk20a_fifo_check_tsg_ctxsw_timeout(struct tsg_gk20a *tsg,
428 bool *verbose, u32 *ms); 428 bool *verbose, u32 *ms);
429void gk20a_fifo_teardown_mask_intr(struct gk20a *g);
430void gk20a_fifo_teardown_unmask_intr(struct gk20a *g);
429bool gk20a_fifo_handle_sched_error(struct gk20a *g); 431bool gk20a_fifo_handle_sched_error(struct gk20a *g);
430 432
431void gk20a_fifo_reset_pbdma_method(struct gk20a *g, int pbdma_id, 433void gk20a_fifo_reset_pbdma_method(struct gk20a *g, int pbdma_id,