author    Seshendra Gadagottu <sgadagottu@nvidia.com>    2015-02-05 17:01:59 -0500
committer Deepak Nibade <dnibade@nvidia.com>    2016-12-27 04:52:04 -0500
commit    750014be79cce9562653db96e735f78fdc2e058f (patch)
tree      9e1aa5cc77aadb262931adf03080fb98304e5787 /drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
parent    c965d7a54a347dc71191561ea5cd1a389ee8b091 (diff)
gpu: nvgpu: gp10b: support for replayable faults
Add support for enabling replayable faults during channel instance
block binding. Also fixed the register programming sequence for
setting the channel pbdma timeout.

Bug 1587825

Change-Id: I5a25819b960001d184507bc597aca051f2ac43ad
Signed-off-by: Seshendra Gadagottu <sgadagottu@nvidia.com>
Reviewed-on: http://git-master/r/681703
Reviewed-by: Automatic_Commit_Validation_User
GVS: Gerrit_Virtual_Submit
Reviewed-by: Terje Bergstrom <tbergstrom@nvidia.com>
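The replayable-fault enable in this patch follows nvgpu's usual read-modify-write pattern on the channel instance block: read the word that holds the field, clear the field with its mask, OR in the *_true_f() encoding, and write the word back. A minimal sketch of that pattern, assuming the gk20a_mem_rd32()/gk20a_mem_wr32() accessors and the generated hw_ram_gp10b.h helpers used in the diff below; the pdb_set_field_true() wrapper itself is hypothetical and not part of the change:

static void pdb_set_field_true(void *inst_ptr, u32 word, u32 mask, u32 true_f)
{
	/* read the current instance-block word that holds the field */
	u32 val = gk20a_mem_rd32(inst_ptr, word);

	val &= ~mask;	/* clear the field */
	val |= true_f;	/* set it to its TRUE encoding */

	/* write the updated word back to the instance block */
	gk20a_mem_wr32(inst_ptr, word, val);
}

/* e.g. mark TEX faults replayable for this channel's page directory: */
pdb_set_field_true(inst_ptr,
		ram_in_page_dir_base_fault_replay_tex_w(),
		ram_in_page_dir_base_fault_replay_tex_m(),
		ram_in_page_dir_base_fault_replay_tex_true_f());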
Diffstat (limited to 'drivers/gpu/nvgpu/gp10b/fifo_gp10b.c')
-rw-r--r--  drivers/gpu/nvgpu/gp10b/fifo_gp10b.c | 124
1 file changed, 124 insertions(+), 0 deletions(-)
diff --git a/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c b/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
index d67c7ee2..db5d4ede 100644
--- a/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
+++ b/drivers/gpu/nvgpu/gp10b/fifo_gp10b.c
@@ -13,11 +13,133 @@
  * more details.
  */
 
+#include <linux/delay.h>
 #include <linux/types.h>
 
 #include "gk20a/gk20a.h"
 #include "gm20b/fifo_gm20b.h"
 #include "hw_pbdma_gp10b.h"
+#include "fifo_gp10b.h"
+#include "hw_ccsr_gp10b.h"
+#include "hw_fifo_gp10b.h"
+#include "hw_ram_gp10b.h"
+
+static void gp10b_set_pdb_fault_replay_flags(struct gk20a *g,
+				void *inst_ptr)
+{
+	u32 val;
+
+	gk20a_dbg_fn("");
+
+	val = gk20a_mem_rd32(inst_ptr,
+			ram_in_page_dir_base_fault_replay_tex_w());
+	val &= ~ram_in_page_dir_base_fault_replay_tex_m();
+	val |= ram_in_page_dir_base_fault_replay_tex_true_f();
+	gk20a_mem_wr32(inst_ptr,
+			ram_in_page_dir_base_fault_replay_tex_w(), val);
+
+	val = gk20a_mem_rd32(inst_ptr,
+			ram_in_page_dir_base_fault_replay_gcc_w());
+	val &= ~ram_in_page_dir_base_fault_replay_gcc_m();
+	val |= ram_in_page_dir_base_fault_replay_gcc_true_f();
+	gk20a_mem_wr32(inst_ptr,
+			ram_in_page_dir_base_fault_replay_gcc_w(), val);
+
+	gk20a_dbg_fn("done");
+}
+
+static int channel_gp10b_commit_userd(struct channel_gk20a *c)
+{
+	u32 addr_lo;
+	u32 addr_hi;
+	void *inst_ptr;
+
+	gk20a_dbg_fn("");
+
+	inst_ptr = c->inst_block.cpuva;
+	if (!inst_ptr)
+		return -ENOMEM;
+
+	addr_lo = u64_lo32(c->userd_iova >> ram_userd_base_shift_v());
+	addr_hi = u64_hi32(c->userd_iova);
+
+	gk20a_dbg_info("channel %d : set ramfc userd 0x%16llx",
+		c->hw_chid, (u64)c->userd_iova);
+
+	gk20a_mem_wr32(inst_ptr, ram_in_ramfc_w() + ram_fc_userd_w(),
+		pbdma_userd_target_vid_mem_f() |
+		pbdma_userd_addr_f(addr_lo));
+
+	gk20a_mem_wr32(inst_ptr, ram_in_ramfc_w() + ram_fc_userd_hi_w(),
+		pbdma_userd_target_vid_mem_f() |
+		pbdma_userd_hi_addr_f(addr_hi));
+
+	return 0;
+}
+
+static int channel_gp10b_setup_ramfc(struct channel_gk20a *c,
+			u64 gpfifo_base, u32 gpfifo_entries)
+{
+	void *inst_ptr;
+
+	gk20a_dbg_fn("");
+
+	inst_ptr = c->inst_block.cpuva;
+	if (!inst_ptr)
+		return -ENOMEM;
+
+	memset(inst_ptr, 0, ram_fc_size_val_v());
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_gp_base_w(),
+		pbdma_gp_base_offset_f(
+		u64_lo32(gpfifo_base >> pbdma_gp_base_rsvd_s())));
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_gp_base_hi_w(),
+		pbdma_gp_base_hi_offset_f(u64_hi32(gpfifo_base)) |
+		pbdma_gp_base_hi_limit2_f(ilog2(gpfifo_entries)));
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_signature_w(),
+		pbdma_signature_hw_valid_f() | pbdma_signature_sw_zero_f());
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_formats_w(),
+		pbdma_formats_gp_fermi0_f() |
+		pbdma_formats_pb_fermi1_f() |
+		pbdma_formats_mp_fermi0_f());
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_pb_header_w(),
+		pbdma_pb_header_priv_user_f() |
+		pbdma_pb_header_method_zero_f() |
+		pbdma_pb_header_subchannel_zero_f() |
+		pbdma_pb_header_level_main_f() |
+		pbdma_pb_header_first_true_f() |
+		pbdma_pb_header_type_inc_f());
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_subdevice_w(),
+		pbdma_subdevice_id_f(1) |
+		pbdma_subdevice_status_active_f() |
+		pbdma_subdevice_channel_dma_enable_f());
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_target_w(), pbdma_target_engine_sw_f());
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_acquire_w(),
+		pbdma_acquire_retry_man_2_f() |
+		pbdma_acquire_retry_exp_2_f() |
+		pbdma_acquire_timeout_exp_max_f() |
+		pbdma_acquire_timeout_man_max_f() |
+		pbdma_acquire_timeout_en_disable_f());
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_runlist_timeslice_w(),
+		pbdma_runlist_timeslice_timeout_128_f() |
+		pbdma_runlist_timeslice_timescale_3_f() |
+		pbdma_runlist_timeslice_enable_true_f());
+
+	gp10b_set_pdb_fault_replay_flags(c->g, inst_ptr);
+
+
+	gk20a_mem_wr32(inst_ptr, ram_fc_chid_w(), ram_fc_chid_id_f(c->hw_chid));
+
+	return channel_gp10b_commit_userd(c);
+}
 
 static u32 gp10b_fifo_get_pbdma_signature(struct gk20a *g)
 {
@@ -28,5 +150,7 @@ static u32 gp10b_fifo_get_pbdma_signature(struct gk20a *g)
 void gp10b_init_fifo(struct gpu_ops *gops)
 {
 	gm20b_init_fifo(gops);
+	gops->fifo.setup_ramfc = channel_gp10b_setup_ramfc;
 	gops->fifo.get_pbdma_signature = gp10b_fifo_get_pbdma_signature;
+
 }
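For reference, the new setup_ramfc hook is expected to be consumed by the common channel code when the GPFIFO backing for a channel is set up. A hedged sketch of such a call site (the field names c->gpfifo.gpu_va and c->gpfifo.entry_num and the clean_up_unmap label are assumptions for illustration; the caller is not part of this patch):

	/* after mapping the GPFIFO, program RAMFC for this channel via the
	 * per-chip hook installed above */
	err = g->ops.fifo.setup_ramfc(c, c->gpfifo.gpu_va, c->gpfifo.entry_num);
	if (err)
		goto clean_up_unmap;	/* hypothetical error label */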