Diffstat (limited to 'drivers/gpu/nvgpu/gk20a/fifo_gk20a.c')
-rw-r--r--  drivers/gpu/nvgpu/gk20a/fifo_gk20a.c | 58
1 file changed, 29 insertions(+), 29 deletions(-)
diff --git a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
index b2a6b1a0..b8b0c9b0 100644
--- a/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/fifo_gk20a.c
@@ -3141,7 +3141,7 @@ static int gk20a_fifo_update_runlist_locked(struct gk20a *g, u32 runlist_id,
 	if (count != 0) {
 		gk20a_writel(g, fifo_runlist_base_r(),
 			fifo_runlist_base_ptr_f(u64_lo32(runlist_iova >> 12)) |
-			gk20a_aperture_mask(g, &runlist->mem[new_buf],
+			nvgpu_aperture_mask(g, &runlist->mem[new_buf],
 				fifo_runlist_base_target_sys_mem_ncoh_f(),
 				fifo_runlist_base_target_vid_mem_f()));
 	}
@@ -3901,7 +3901,7 @@ static void gk20a_fifo_channel_bind(struct channel_gk20a *c)
 
 	gk20a_writel(g, ccsr_channel_inst_r(c->hw_chid),
 		ccsr_channel_inst_ptr_f(inst_ptr) |
-		gk20a_aperture_mask(g, &c->inst_block,
+		nvgpu_aperture_mask(g, &c->inst_block,
 			ccsr_channel_inst_target_sys_mem_ncoh_f(),
 			ccsr_channel_inst_target_vid_mem_f()) |
 		ccsr_channel_inst_bind_true_f());
@@ -3943,14 +3943,14 @@ static int gk20a_fifo_commit_userd(struct channel_gk20a *c)
 	gk20a_dbg_info("channel %d : set ramfc userd 0x%16llx",
 		c->hw_chid, (u64)c->userd_iova);
 
-	gk20a_mem_wr32(g, &c->inst_block,
+	nvgpu_mem_wr32(g, &c->inst_block,
 		ram_in_ramfc_w() + ram_fc_userd_w(),
-		gk20a_aperture_mask(g, &g->fifo.userd,
+		nvgpu_aperture_mask(g, &g->fifo.userd,
 			pbdma_userd_target_sys_mem_ncoh_f(),
 			pbdma_userd_target_vid_mem_f()) |
 		pbdma_userd_addr_f(addr_lo));
 
-	gk20a_mem_wr32(g, &c->inst_block,
+	nvgpu_mem_wr32(g, &c->inst_block,
 		ram_in_ramfc_w() + ram_fc_userd_hi_w(),
 		pbdma_userd_hi_addr_f(addr_hi));
 
@@ -3967,25 +3967,25 @@ int gk20a_fifo_setup_ramfc(struct channel_gk20a *c,
 
 	gk20a_dbg_fn("");
 
-	gk20a_memset(g, mem, 0, 0, ram_fc_size_val_v());
+	nvgpu_memset(g, mem, 0, 0, ram_fc_size_val_v());
 
-	gk20a_mem_wr32(g, mem, ram_fc_gp_base_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_gp_base_w(),
 		pbdma_gp_base_offset_f(
 			u64_lo32(gpfifo_base >> pbdma_gp_base_rsvd_s())));
 
-	gk20a_mem_wr32(g, mem, ram_fc_gp_base_hi_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_gp_base_hi_w(),
 		pbdma_gp_base_hi_offset_f(u64_hi32(gpfifo_base)) |
 		pbdma_gp_base_hi_limit2_f(ilog2(gpfifo_entries)));
 
-	gk20a_mem_wr32(g, mem, ram_fc_signature_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_signature_w(),
 		c->g->ops.fifo.get_pbdma_signature(c->g));
 
-	gk20a_mem_wr32(g, mem, ram_fc_formats_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_formats_w(),
 		pbdma_formats_gp_fermi0_f() |
 		pbdma_formats_pb_fermi1_f() |
 		pbdma_formats_mp_fermi0_f());
 
-	gk20a_mem_wr32(g, mem, ram_fc_pb_header_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_pb_header_w(),
 		pbdma_pb_header_priv_user_f() |
 		pbdma_pb_header_method_zero_f() |
 		pbdma_pb_header_subchannel_zero_f() |
@@ -3993,27 +3993,27 @@ int gk20a_fifo_setup_ramfc(struct channel_gk20a *c,
 		pbdma_pb_header_first_true_f() |
 		pbdma_pb_header_type_inc_f());
 
-	gk20a_mem_wr32(g, mem, ram_fc_subdevice_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_subdevice_w(),
 		pbdma_subdevice_id_f(1) |
 		pbdma_subdevice_status_active_f() |
 		pbdma_subdevice_channel_dma_enable_f());
 
-	gk20a_mem_wr32(g, mem, ram_fc_target_w(), pbdma_target_engine_sw_f());
+	nvgpu_mem_wr32(g, mem, ram_fc_target_w(), pbdma_target_engine_sw_f());
 
-	gk20a_mem_wr32(g, mem, ram_fc_acquire_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_acquire_w(),
 		g->ops.fifo.pbdma_acquire_val(timeout));
 
-	gk20a_mem_wr32(g, mem, ram_fc_runlist_timeslice_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_runlist_timeslice_w(),
 		fifo_runlist_timeslice_timeout_128_f() |
 		fifo_runlist_timeslice_timescale_3_f() |
 		fifo_runlist_timeslice_enable_true_f());
 
-	gk20a_mem_wr32(g, mem, ram_fc_pb_timeslice_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_pb_timeslice_w(),
 		fifo_pb_timeslice_timeout_16_f() |
 		fifo_pb_timeslice_timescale_0_f() |
 		fifo_pb_timeslice_enable_true_f());
 
-	gk20a_mem_wr32(g, mem, ram_fc_chid_w(), ram_fc_chid_id_f(c->hw_chid));
+	nvgpu_mem_wr32(g, mem, ram_fc_chid_w(), ram_fc_chid_id_f(c->hw_chid));
 
 	if (c->is_privileged_channel)
 		gk20a_fifo_setup_ramfc_for_privileged_channel(c);
@@ -4035,7 +4035,7 @@ static int channel_gk20a_set_schedule_params(struct channel_gk20a *c)
 	WARN_ON(c->g->ops.fifo.preempt_channel(c->g, c->hw_chid));
 
 	/* set new timeslice */
-	gk20a_mem_wr32(c->g, &c->inst_block, ram_fc_runlist_timeslice_w(),
+	nvgpu_mem_wr32(c->g, &c->inst_block, ram_fc_runlist_timeslice_w(),
 		value | (shift << 12) |
 		fifo_runlist_timeslice_enable_true_f());
 
@@ -4102,7 +4102,7 @@ void gk20a_fifo_setup_ramfc_for_privileged_channel(struct channel_gk20a *c)
 	gk20a_dbg_info("channel %d : set ramfc privileged_channel", c->hw_chid);
 
 	/* Enable HCE priv mode for phys mode transfer */
-	gk20a_mem_wr32(g, mem, ram_fc_hce_ctrl_w(),
+	nvgpu_mem_wr32(g, mem, ram_fc_hce_ctrl_w(),
 		pbdma_hce_ctrl_hce_priv_mode_yes_f());
 }
 
@@ -4114,16 +4114,16 @@ int gk20a_fifo_setup_userd(struct channel_gk20a *c)
 
 	gk20a_dbg_fn("");
 
-	gk20a_mem_wr32(g, mem, offset + ram_userd_put_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_get_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_ref_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_put_hi_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_ref_threshold_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_gp_top_level_get_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_gp_top_level_get_hi_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_get_hi_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_gp_get_w(), 0);
-	gk20a_mem_wr32(g, mem, offset + ram_userd_gp_put_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_put_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_get_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_ref_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_put_hi_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_ref_threshold_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_gp_top_level_get_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_gp_top_level_get_hi_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_get_hi_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_gp_get_w(), 0);
+	nvgpu_mem_wr32(g, mem, offset + ram_userd_gp_put_w(), 0);
 
 	return 0;
 }
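
Note: all 29 replacements above are a mechanical rename of the mem-access helpers (gk20a_mem_wr32, gk20a_memset, gk20a_aperture_mask) to their common nvgpu_ prefixed names; arguments and register field values are unchanged. Every aperture_mask call site passes one register field value per possible backing aperture, and the helper returns the one matching where the buffer actually lives. A minimal sketch of that shape follows, using simplified stand-in types (the real nvgpu_mem and aperture definitions live elsewhere in the driver); it is illustrative only, not the nvgpu implementation.

/*
 * Illustrative sketch of the aperture-mask pattern -- not the real nvgpu
 * code.  Only the call shape (g, mem, sysmem_mask, vidmem_mask) is taken
 * from the call sites in this diff; the types below are stand-ins.
 */
#include <stdint.h>

struct gk20a;                           /* opaque device handle */

enum aperture {                         /* stand-in for nvgpu's aperture enum */
	APERTURE_SYSMEM,
	APERTURE_VIDMEM,
};

struct nvgpu_mem {                      /* simplified: the real struct holds much more */
	enum aperture aperture;
};

/* Return whichever register field value matches the buffer's aperture. */
static uint32_t nvgpu_aperture_mask(struct gk20a *g, struct nvgpu_mem *mem,
				    uint32_t sysmem_mask, uint32_t vidmem_mask)
{
	(void)g;                        /* device handle unused in this sketch */
	return mem->aperture == APERTURE_VIDMEM ? vidmem_mask : sysmem_mask;
}

A caller then ORs the returned field into the register or RAMFC word it is building, as in the runlist base programming in the first hunk.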