summary | refs | log | tree | commit | diff | stats
path: root/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/gpu/nvgpu/gk20a/fifo_gk20a.c')
-rw-r--r--  drivers/gpu/nvgpu/gk20a/fifo_gk20a.c  195
1 files changed, 0 insertions, 195 deletions
diff --git a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
index 17f3743f..31b470d4 100644
--- a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
@@ -822,51 +822,6 @@ int gk20a_init_fifo_reset_enable_hw(struct gk20a *g)
822 return 0; 822 return 0;
823} 823}
824 824
825static void gk20a_fifo_init_pbdma_intr_descs(struct fifo_gk20a *f)
826{
827 /*
828 * These are all errors which indicate something really wrong
829 * going on in the device
830 */
831 f->intr.pbdma.device_fatal_0 =
832 pbdma_intr_0_memreq_pending_f() |
833 pbdma_intr_0_memack_timeout_pending_f() |
834 pbdma_intr_0_memack_extra_pending_f() |
835 pbdma_intr_0_memdat_timeout_pending_f() |
836 pbdma_intr_0_memdat_extra_pending_f() |
837 pbdma_intr_0_memflush_pending_f() |
838 pbdma_intr_0_memop_pending_f() |
839 pbdma_intr_0_lbconnect_pending_f() |
840 pbdma_intr_0_lback_timeout_pending_f() |
841 pbdma_intr_0_lback_extra_pending_f() |
842 pbdma_intr_0_lbdat_timeout_pending_f() |
843 pbdma_intr_0_lbdat_extra_pending_f() |
844 pbdma_intr_0_xbarconnect_pending_f() |
845 pbdma_intr_0_pri_pending_f();
846
847 /*
848 * These are data parsing, framing errors or others which can be
849 * recovered from with intervention... or just resetting the
850 * channel
851 */
852 f->intr.pbdma.channel_fatal_0 =
853 pbdma_intr_0_gpfifo_pending_f() |
854 pbdma_intr_0_gpptr_pending_f() |
855 pbdma_intr_0_gpentry_pending_f() |
856 pbdma_intr_0_gpcrc_pending_f() |
857 pbdma_intr_0_pbptr_pending_f() |
858 pbdma_intr_0_pbentry_pending_f() |
859 pbdma_intr_0_pbcrc_pending_f() |
860 pbdma_intr_0_method_pending_f() |
861 pbdma_intr_0_methodcrc_pending_f() |
862 pbdma_intr_0_pbseg_pending_f() |
863 pbdma_intr_0_signature_pending_f();
864
865 /* Can be used for sw-methods, or represents a recoverable timeout. */
866 f->intr.pbdma.restartable_0 =
867 pbdma_intr_0_device_pending_f();
868}
869
870static int gk20a_init_fifo_setup_sw(struct gk20a *g) 825static int gk20a_init_fifo_setup_sw(struct gk20a *g)
871{ 826{
872 struct fifo_gk20a *f = &g->fifo; 827 struct fifo_gk20a *f = &g->fifo;
@@ -1722,46 +1677,6 @@ static void gk20a_fifo_get_faulty_id_type(struct gk20a *g, int engine_id,
1722 fifo_engine_status_id_type_v(status); 1677 fifo_engine_status_id_type_v(status);
1723} 1678}
1724 1679
1725static void gk20a_fifo_trigger_mmu_fault(struct gk20a *g,
1726 unsigned long engine_ids)
1727{
1728 struct nvgpu_timeout timeout;
1729 unsigned long delay = GR_IDLE_CHECK_DEFAULT;
1730 unsigned long engine_id;
1731
1732 /* trigger faults for all bad engines */
1733 for_each_set_bit(engine_id, &engine_ids, 32) {
1734 u32 mmu_id;
1735
1736 if (!gk20a_fifo_is_valid_engine_id(g, engine_id)) {
1737 WARN_ON(true);
1738 break;
1739 }
1740
1741 mmu_id = gk20a_engine_id_to_mmu_id(g, engine_id);
1742 if (mmu_id != FIFO_INVAL_ENGINE_ID)
1743 gk20a_writel(g, fifo_trigger_mmu_fault_r(engine_id),
1744 fifo_trigger_mmu_fault_id_f(mmu_id) |
1745 fifo_trigger_mmu_fault_enable_f(1));
1746 }
1747
1748 /* Wait for MMU fault to trigger */
1749 nvgpu_timeout_init(g, &timeout, gk20a_get_gr_idle_timeout(g),
1750 NVGPU_TIMER_CPU_TIMER);
1751 do {
1752 if (gk20a_readl(g, fifo_intr_0_r()) &
1753 fifo_intr_0_mmu_fault_pending_f())
1754 break;
1755
1756 nvgpu_usleep_range(delay, delay * 2);
1757 delay = min_t(u32, delay << 1, GR_IDLE_CHECK_MAX);
1758 } while (!nvgpu_timeout_expired_msg(&timeout, "mmu fault timeout"));
1759
1760 /* release mmu fault trigger */
1761 for_each_set_bit(engine_id, &engine_ids, 32)
1762 gk20a_writel(g, fifo_trigger_mmu_fault_r(engine_id), 0);
1763}
1764
1765static u32 gk20a_fifo_engines_on_id(struct gk20a *g, u32 id, bool is_tsg) 1680static u32 gk20a_fifo_engines_on_id(struct gk20a *g, u32 id, bool is_tsg)
1766{ 1681{
1767 unsigned int i; 1682 unsigned int i;
@@ -3406,22 +3321,6 @@ int gk20a_fifo_wait_engine_idle(struct gk20a *g)
3406 return ret; 3321 return ret;
3407} 3322}
3408 3323
3409static void gk20a_fifo_apply_pb_timeout(struct gk20a *g)
3410{
3411 u32 timeout;
3412
3413 if (nvgpu_platform_is_silicon(g)) {
3414 timeout = gk20a_readl(g, fifo_pb_timeout_r());
3415 timeout &= ~fifo_pb_timeout_detection_enabled_f();
3416 gk20a_writel(g, fifo_pb_timeout_r(), timeout);
3417 }
3418}
3419
3420static u32 gk20a_fifo_get_num_fifos(struct gk20a *g)
3421{
3422 return ccsr_channel__size_1_v();
3423}
3424
3425u32 gk20a_fifo_get_pbdma_signature(struct gk20a *g) 3324u32 gk20a_fifo_get_pbdma_signature(struct gk20a *g)
3426{ 3325{
3427 return pbdma_signature_hw_valid_f() | pbdma_signature_sw_zero_f(); 3326 return pbdma_signature_hw_valid_f() | pbdma_signature_sw_zero_f();
@@ -3686,38 +3585,6 @@ void gk20a_fifo_disable_channel(struct channel_gk20a *ch)
3686 ccsr_channel_enable_clr_true_f()); 3585 ccsr_channel_enable_clr_true_f());
3687} 3586}
3688 3587
3689static void gk20a_fifo_channel_bind(struct channel_gk20a *c)
3690{
3691 struct gk20a *g = c->g;
3692 u32 inst_ptr = gk20a_mm_inst_block_addr(g, &c->inst_block) >>
3693 ram_in_base_shift_v();
3694
3695 gk20a_dbg_info("bind channel %d inst ptr 0x%08x",
3696 c->chid, inst_ptr);
3697
3698
3699 gk20a_writel(g, ccsr_channel_r(c->chid),
3700 (gk20a_readl(g, ccsr_channel_r(c->chid)) &
3701 ~ccsr_channel_runlist_f(~0)) |
3702 ccsr_channel_runlist_f(c->runlist_id));
3703
3704 gk20a_writel(g, ccsr_channel_inst_r(c->chid),
3705 ccsr_channel_inst_ptr_f(inst_ptr) |
3706 nvgpu_aperture_mask(g, &c->inst_block,
3707 ccsr_channel_inst_target_sys_mem_ncoh_f(),
3708 ccsr_channel_inst_target_vid_mem_f()) |
3709 ccsr_channel_inst_bind_true_f());
3710
3711 gk20a_writel(g, ccsr_channel_r(c->chid),
3712 (gk20a_readl(g, ccsr_channel_r(c->chid)) &
3713 ~ccsr_channel_enable_set_f(~0)) |
3714 ccsr_channel_enable_set_true_f());
3715
3716 wmb();
3717 atomic_set(&c->bound, true);
3718
3719}
3720
3721void gk20a_fifo_channel_unbind(struct channel_gk20a *ch_gk20a) 3588void gk20a_fifo_channel_unbind(struct channel_gk20a *ch_gk20a)
3722{ 3589{
3723 struct gk20a *g = ch_gk20a->g; 3590 struct gk20a *g = ch_gk20a->g;
@@ -4080,65 +3947,3 @@ int gk20a_fifo_alloc_syncpt_buf(struct channel_gk20a *c,
4080 return 0; 3947 return 0;
4081} 3948}
4082#endif 3949#endif
4083
4084
4085void gk20a_init_fifo(struct gpu_ops *gops)
4086{
4087 gops->fifo.disable_channel = gk20a_fifo_disable_channel;
4088 gops->fifo.enable_channel = gk20a_fifo_enable_channel;
4089 gops->fifo.bind_channel = gk20a_fifo_channel_bind;
4090 gops->fifo.unbind_channel = gk20a_fifo_channel_unbind;
4091 gops->fifo.init_fifo_setup_hw = gk20a_init_fifo_setup_hw;
4092 gops->fifo.preempt_channel = gk20a_fifo_preempt_channel;
4093 gops->fifo.preempt_tsg = gk20a_fifo_preempt_tsg;
4094 gops->fifo.update_runlist = gk20a_fifo_update_runlist;
4095 gops->fifo.trigger_mmu_fault = gk20a_fifo_trigger_mmu_fault;
4096 gops->fifo.get_mmu_fault_info = gk20a_fifo_get_mmu_fault_info;
4097 gops->fifo.apply_pb_timeout = gk20a_fifo_apply_pb_timeout;
4098 gops->fifo.wait_engine_idle = gk20a_fifo_wait_engine_idle;
4099 gops->fifo.get_num_fifos = gk20a_fifo_get_num_fifos;
4100 gops->fifo.get_pbdma_signature = gk20a_fifo_get_pbdma_signature;
4101 gops->fifo.set_runlist_interleave = gk20a_fifo_set_runlist_interleave;
4102 gops->fifo.tsg_set_timeslice = gk20a_fifo_tsg_set_timeslice;
4103 gops->fifo.force_reset_ch = gk20a_fifo_force_reset_ch;
4104 gops->fifo.engine_enum_from_type = gk20a_fifo_engine_enum_from_type;
4105 /* gk20a doesn't support device_info_data packet parsing */
4106 gops->fifo.device_info_data_parse = NULL;
4107 gops->fifo.eng_runlist_base_size = fifo_eng_runlist_base__size_1_v;
4108 gops->fifo.init_engine_info = gk20a_fifo_init_engine_info;
4109 gops->fifo.runlist_entry_size = ram_rl_entry_size_v;
4110 gops->fifo.get_tsg_runlist_entry = gk20a_get_tsg_runlist_entry;
4111 gops->fifo.get_ch_runlist_entry = gk20a_get_ch_runlist_entry;
4112 gops->fifo.is_fault_engine_subid_gpc = gk20a_is_fault_engine_subid_gpc;
4113 gops->fifo.dump_pbdma_status = gk20a_dump_pbdma_status;
4114 gops->fifo.dump_eng_status = gk20a_dump_eng_status;
4115 gops->fifo.dump_channel_status_ramfc = gk20a_dump_channel_status_ramfc;
4116 gops->fifo.intr_0_error_mask = gk20a_fifo_intr_0_error_mask;
4117 gops->fifo.is_preempt_pending = gk20a_fifo_is_preempt_pending;
4118 gops->fifo.init_pbdma_intr_descs = gk20a_fifo_init_pbdma_intr_descs;
4119 gops->fifo.reset_enable_hw = gk20a_init_fifo_reset_enable_hw;
4120 gops->fifo.setup_ramfc = gk20a_fifo_setup_ramfc;
4121 gops->fifo.channel_set_priority = gk20a_fifo_set_priority;
4122 gops->fifo.channel_set_timeslice = gk20a_fifo_set_timeslice;
4123 gops->fifo.alloc_inst = gk20a_fifo_alloc_inst;
4124 gops->fifo.free_inst = gk20a_fifo_free_inst;
4125 gops->fifo.setup_userd = gk20a_fifo_setup_userd;
4126 gops->fifo.userd_gp_get = gk20a_fifo_userd_gp_get;
4127 gops->fifo.userd_gp_put = gk20a_fifo_userd_gp_put;
4128 gops->fifo.userd_pb_get = gk20a_fifo_userd_pb_get;
4129 gops->fifo.pbdma_acquire_val = gk20a_fifo_pbdma_acquire_val;
4130 gops->fifo.teardown_ch_tsg = gk20a_fifo_teardown_ch_tsg;
4131 gops->fifo.handle_sched_error = gk20a_fifo_handle_sched_error;
4132 gops->fifo.handle_pbdma_intr_0 = gk20a_fifo_handle_pbdma_intr_0;
4133 gops->fifo.handle_pbdma_intr_1 = gk20a_fifo_handle_pbdma_intr_1;
4134#ifdef CONFIG_TEGRA_GK20A_NVHOST
4135 gops->fifo.alloc_syncpt_buf = gk20a_fifo_alloc_syncpt_buf;
4136 gops->fifo.free_syncpt_buf = gk20a_fifo_free_syncpt_buf;
4137 gops->fifo.add_syncpt_wait_cmd = gk20a_fifo_add_syncpt_wait_cmd;
4138 gops->fifo.get_syncpt_wait_cmd_size =
4139 gk20a_fifo_get_syncpt_wait_cmd_size;
4140 gops->fifo.add_syncpt_incr_cmd = gk20a_fifo_add_syncpt_incr_cmd;
4141 gops->fifo.get_syncpt_incr_cmd_size =
4142 gk20a_fifo_get_syncpt_incr_cmd_size;
4143#endif
4144}