summaryrefslogtreecommitdiffstats
path: root/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
diff options
context:
space:
mode:
authorAlex Waterman <alexw@nvidia.com>2017-03-15 19:42:12 -0400
committermobile promotions <svcmobile_promotions@nvidia.com>2017-04-06 21:14:48 -0400
commitb69020bff5dfa69cad926c9374cdbe9a62509ffd (patch)
tree222f6b6bc23561a38004a257cbac401e431ff3be /drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
parentfa4ecf5730a75269e85cc41c2ad2ee61307e72a9 (diff)
gpu: nvgpu: Rename gk20a_mem_* functions
Rename the functions used for mem_desc access to nvgpu_mem_*. JIRA NVGPU-12 Change-Id: Ibfdc1112d43f0a125e4487c250e3f977ffd2cd75 Signed-off-by: Alex Waterman <alexw@nvidia.com> Reviewed-on: http://git-master/r/1323325 Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com> Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
Diffstat (limited to 'drivers/gpu/nvgpu/gp10b/fifo_gp10b.c')
-rw-r--r--drivers/gpu/nvgpu/gp10b/fifo_gp10b.c40
1 file changed, 20 insertions, 20 deletions
diff --git a/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c b/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
index 6f1a0298..3787662b 100644
--- a/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
+++ b/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
@@ -33,18 +33,18 @@ static void gp10b_set_pdb_fault_replay_flags(struct gk20a *g,
33 33
34 gk20a_dbg_fn(""); 34 gk20a_dbg_fn("");
35 35
36 val = gk20a_mem_rd32(g, mem, 36 val = nvgpu_mem_rd32(g, mem,
37 ram_in_page_dir_base_fault_replay_tex_w()); 37 ram_in_page_dir_base_fault_replay_tex_w());
38 val &= ~ram_in_page_dir_base_fault_replay_tex_m(); 38 val &= ~ram_in_page_dir_base_fault_replay_tex_m();
39 val |= ram_in_page_dir_base_fault_replay_tex_true_f(); 39 val |= ram_in_page_dir_base_fault_replay_tex_true_f();
40 gk20a_mem_wr32(g, mem, 40 nvgpu_mem_wr32(g, mem,
41 ram_in_page_dir_base_fault_replay_tex_w(), val); 41 ram_in_page_dir_base_fault_replay_tex_w(), val);
42 42
43 val = gk20a_mem_rd32(g, mem, 43 val = nvgpu_mem_rd32(g, mem,
44 ram_in_page_dir_base_fault_replay_gcc_w()); 44 ram_in_page_dir_base_fault_replay_gcc_w());
45 val &= ~ram_in_page_dir_base_fault_replay_gcc_m(); 45 val &= ~ram_in_page_dir_base_fault_replay_gcc_m();
46 val |= ram_in_page_dir_base_fault_replay_gcc_true_f(); 46 val |= ram_in_page_dir_base_fault_replay_gcc_true_f();
47 gk20a_mem_wr32(g, mem, 47 nvgpu_mem_wr32(g, mem,
48 ram_in_page_dir_base_fault_replay_gcc_w(), val); 48 ram_in_page_dir_base_fault_replay_gcc_w(), val);
49 49
50 gk20a_dbg_fn("done"); 50 gk20a_dbg_fn("done");
@@ -64,14 +64,14 @@ int channel_gp10b_commit_userd(struct channel_gk20a *c)
64 gk20a_dbg_info("channel %d : set ramfc userd 0x%16llx", 64 gk20a_dbg_info("channel %d : set ramfc userd 0x%16llx",
65 c->hw_chid, (u64)c->userd_iova); 65 c->hw_chid, (u64)c->userd_iova);
66 66
67 gk20a_mem_wr32(g, &c->inst_block, 67 nvgpu_mem_wr32(g, &c->inst_block,
68 ram_in_ramfc_w() + ram_fc_userd_w(), 68 ram_in_ramfc_w() + ram_fc_userd_w(),
69 (g->mm.vidmem_is_vidmem ? 69 (g->mm.vidmem_is_vidmem ?
70 pbdma_userd_target_sys_mem_ncoh_f() : 70 pbdma_userd_target_sys_mem_ncoh_f() :
71 pbdma_userd_target_vid_mem_f()) | 71 pbdma_userd_target_vid_mem_f()) |
72 pbdma_userd_addr_f(addr_lo)); 72 pbdma_userd_addr_f(addr_lo));
73 73
74 gk20a_mem_wr32(g, &c->inst_block, 74 nvgpu_mem_wr32(g, &c->inst_block,
75 ram_in_ramfc_w() + ram_fc_userd_hi_w(), 75 ram_in_ramfc_w() + ram_fc_userd_hi_w(),
76 pbdma_userd_hi_addr_f(addr_hi)); 76 pbdma_userd_hi_addr_f(addr_hi));
77 77
@@ -87,25 +87,25 @@ static int channel_gp10b_setup_ramfc(struct channel_gk20a *c,
87 87
88 gk20a_dbg_fn(""); 88 gk20a_dbg_fn("");
89 89
90 gk20a_memset(g, mem, 0, 0, ram_fc_size_val_v()); 90 nvgpu_memset(g, mem, 0, 0, ram_fc_size_val_v());
91 91
92 gk20a_mem_wr32(g, mem, ram_fc_gp_base_w(), 92 nvgpu_mem_wr32(g, mem, ram_fc_gp_base_w(),
93 pbdma_gp_base_offset_f( 93 pbdma_gp_base_offset_f(
94 u64_lo32(gpfifo_base >> pbdma_gp_base_rsvd_s()))); 94 u64_lo32(gpfifo_base >> pbdma_gp_base_rsvd_s())));
95 95
96 gk20a_mem_wr32(g, mem, ram_fc_gp_base_hi_w(), 96 nvgpu_mem_wr32(g, mem, ram_fc_gp_base_hi_w(),
97 pbdma_gp_base_hi_offset_f(u64_hi32(gpfifo_base)) | 97 pbdma_gp_base_hi_offset_f(u64_hi32(gpfifo_base)) |
98 pbdma_gp_base_hi_limit2_f(ilog2(gpfifo_entries))); 98 pbdma_gp_base_hi_limit2_f(ilog2(gpfifo_entries)));
99 99
100 gk20a_mem_wr32(g, mem, ram_fc_signature_w(), 100 nvgpu_mem_wr32(g, mem, ram_fc_signature_w(),
101 c->g->ops.fifo.get_pbdma_signature(c->g)); 101 c->g->ops.fifo.get_pbdma_signature(c->g));
102 102
103 gk20a_mem_wr32(g, mem, ram_fc_formats_w(), 103 nvgpu_mem_wr32(g, mem, ram_fc_formats_w(),
104 pbdma_formats_gp_fermi0_f() | 104 pbdma_formats_gp_fermi0_f() |
105 pbdma_formats_pb_fermi1_f() | 105 pbdma_formats_pb_fermi1_f() |
106 pbdma_formats_mp_fermi0_f()); 106 pbdma_formats_mp_fermi0_f());
107 107
108 gk20a_mem_wr32(g, mem, ram_fc_pb_header_w(), 108 nvgpu_mem_wr32(g, mem, ram_fc_pb_header_w(),
109 pbdma_pb_header_priv_user_f() | 109 pbdma_pb_header_priv_user_f() |
110 pbdma_pb_header_method_zero_f() | 110 pbdma_pb_header_method_zero_f() |
111 pbdma_pb_header_subchannel_zero_f() | 111 pbdma_pb_header_subchannel_zero_f() |
@@ -113,17 +113,17 @@ static int channel_gp10b_setup_ramfc(struct channel_gk20a *c,
113 pbdma_pb_header_first_true_f() | 113 pbdma_pb_header_first_true_f() |
114 pbdma_pb_header_type_inc_f()); 114 pbdma_pb_header_type_inc_f());
115 115
116 gk20a_mem_wr32(g, mem, ram_fc_subdevice_w(), 116 nvgpu_mem_wr32(g, mem, ram_fc_subdevice_w(),
117 pbdma_subdevice_id_f(1) | 117 pbdma_subdevice_id_f(1) |
118 pbdma_subdevice_status_active_f() | 118 pbdma_subdevice_status_active_f() |
119 pbdma_subdevice_channel_dma_enable_f()); 119 pbdma_subdevice_channel_dma_enable_f());
120 120
121 gk20a_mem_wr32(g, mem, ram_fc_target_w(), pbdma_target_engine_sw_f()); 121 nvgpu_mem_wr32(g, mem, ram_fc_target_w(), pbdma_target_engine_sw_f());
122 122
123 gk20a_mem_wr32(g, mem, ram_fc_acquire_w(), 123 nvgpu_mem_wr32(g, mem, ram_fc_acquire_w(),
124 g->ops.fifo.pbdma_acquire_val(acquire_timeout)); 124 g->ops.fifo.pbdma_acquire_val(acquire_timeout));
125 125
126 gk20a_mem_wr32(g, mem, ram_fc_runlist_timeslice_w(), 126 nvgpu_mem_wr32(g, mem, ram_fc_runlist_timeslice_w(),
127 pbdma_runlist_timeslice_timeout_128_f() | 127 pbdma_runlist_timeslice_timeout_128_f() |
128 pbdma_runlist_timeslice_timescale_3_f() | 128 pbdma_runlist_timeslice_timescale_3_f() |
129 pbdma_runlist_timeslice_enable_true_f()); 129 pbdma_runlist_timeslice_enable_true_f());
@@ -132,11 +132,11 @@ static int channel_gp10b_setup_ramfc(struct channel_gk20a *c,
132 gp10b_set_pdb_fault_replay_flags(c->g, mem); 132 gp10b_set_pdb_fault_replay_flags(c->g, mem);
133 133
134 134
135 gk20a_mem_wr32(g, mem, ram_fc_chid_w(), ram_fc_chid_id_f(c->hw_chid)); 135 nvgpu_mem_wr32(g, mem, ram_fc_chid_w(), ram_fc_chid_id_f(c->hw_chid));
136 136
137 if (c->is_privileged_channel) { 137 if (c->is_privileged_channel) {
138 /* Set privilege level for channel */ 138 /* Set privilege level for channel */
139 gk20a_mem_wr32(g, mem, ram_fc_config_w(), 139 nvgpu_mem_wr32(g, mem, ram_fc_config_w(),
140 pbdma_config_auth_level_privileged_f()); 140 pbdma_config_auth_level_privileged_f());
141 141
142 gk20a_fifo_setup_ramfc_for_privileged_channel(c); 142 gk20a_fifo_setup_ramfc_for_privileged_channel(c);
@@ -158,7 +158,7 @@ static int gp10b_fifo_resetup_ramfc(struct channel_gk20a *c)
158 158
159 gk20a_dbg_fn(""); 159 gk20a_dbg_fn("");
160 160
161 v = gk20a_mem_rd32(c->g, &c->inst_block, 161 v = nvgpu_mem_rd32(c->g, &c->inst_block,
162 ram_fc_allowed_syncpoints_w()); 162 ram_fc_allowed_syncpoints_w());
163 old_syncpt = pbdma_allowed_syncpoints_0_index_v(v); 163 old_syncpt = pbdma_allowed_syncpoints_0_index_v(v);
164 if (c->sync) 164 if (c->sync)
@@ -178,7 +178,7 @@ static int gp10b_fifo_resetup_ramfc(struct channel_gk20a *c)
178 178
179 v |= pbdma_allowed_syncpoints_0_index_f(new_syncpt); 179 v |= pbdma_allowed_syncpoints_0_index_f(new_syncpt);
180 180
181 gk20a_mem_wr32(c->g, &c->inst_block, 181 nvgpu_mem_wr32(c->g, &c->inst_block,
182 ram_fc_allowed_syncpoints_w(), v); 182 ram_fc_allowed_syncpoints_w(), v);
183 } 183 }
184 184