Diffstat (limited to 'drivers/gpu/drm/radeon/radeon_asic.h')
-rw-r--r--  drivers/gpu/drm/radeon/radeon_asic.h | 545
1 file changed, 50 insertions, 495 deletions
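
The change removes the per-family `static struct radeon_asic` dispatch tables from this header and leaves only function prototypes; the tables themselves are expected to be built elsewhere, presumably in radeon_asic.c, which is not part of this diff. As a minimal sketch of what that would look like for the r100 family, using only callback names that appear in the block removed below:

/* Hypothetical excerpt, assuming the r100 table moves to radeon_asic.c
 * (that file is not shown in this diff).  Every callback named here is
 * declared by the prototypes that radeon_asic.h now exports.
 */
#include "radeon.h"
#include "radeon_asic.h"

static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.fini = &r100_fini,
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,		/* r100 has no DMA copy path */
	.copy = &r100_copy_blit,
	.bandwidth_update = &r100_bandwidth_update,
	/* ... remaining callbacks exactly as in the table removed below ... */
	.ioctl_wait_idle = NULL,
};
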
diff --git a/drivers/gpu/drm/radeon/radeon_asic.h b/drivers/gpu/drm/radeon/radeon_asic.h
index d3a157b2bcb7..a0b8280663d1 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.h
+++ b/drivers/gpu/drm/radeon/radeon_asic.h
@@ -45,10 +45,18 @@ void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
 /*
  * r100,rv100,rs100,rv200,rs200
  */
-extern int r100_init(struct radeon_device *rdev);
-extern void r100_fini(struct radeon_device *rdev);
-extern int r100_suspend(struct radeon_device *rdev);
-extern int r100_resume(struct radeon_device *rdev);
+struct r100_mc_save {
+	u32 GENMO_WT;
+	u32 CRTC_EXT_CNTL;
+	u32 CRTC_GEN_CNTL;
+	u32 CRTC2_GEN_CNTL;
+	u32 CUR_OFFSET;
+	u32 CUR2_OFFSET;
+};
+int r100_init(struct radeon_device *rdev);
+void r100_fini(struct radeon_device *rdev);
+int r100_suspend(struct radeon_device *rdev);
+int r100_resume(struct radeon_device *rdev);
 uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
 void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void r100_vga_set_state(struct radeon_device *rdev, bool state);
@@ -73,7 +81,7 @@ int r100_copy_blit(struct radeon_device *rdev,
 int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
-int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
+void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
 void r100_bandwidth_update(struct radeon_device *rdev);
 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
 int r100_ring_test(struct radeon_device *rdev);
@@ -82,44 +90,42 @@ void r100_hpd_fini(struct radeon_device *rdev);
 bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
-
-static struct radeon_asic r100_asic = {
-	.init = &r100_init,
-	.fini = &r100_fini,
-	.suspend = &r100_suspend,
-	.resume = &r100_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r100_gpu_reset,
-	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
-	.gart_set_page = &r100_pci_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r100_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &r100_irq_set,
-	.irq_process = &r100_irq_process,
-	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r100_fence_ring_emit,
-	.cs_parse = &r100_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = NULL,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
+int r100_debugfs_rbbm_init(struct radeon_device *rdev);
+int r100_debugfs_cp_init(struct radeon_device *rdev);
+void r100_cp_disable(struct radeon_device *rdev);
+int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
+void r100_cp_fini(struct radeon_device *rdev);
+int r100_pci_gart_init(struct radeon_device *rdev);
+void r100_pci_gart_fini(struct radeon_device *rdev);
+int r100_pci_gart_enable(struct radeon_device *rdev);
+void r100_pci_gart_disable(struct radeon_device *rdev);
+int r100_debugfs_mc_info_init(struct radeon_device *rdev);
+int r100_gui_wait_for_idle(struct radeon_device *rdev);
+void r100_ib_fini(struct radeon_device *rdev);
+int r100_ib_init(struct radeon_device *rdev);
+void r100_irq_disable(struct radeon_device *rdev);
+void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
+void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
+void r100_vram_init_sizes(struct radeon_device *rdev);
+void r100_wb_disable(struct radeon_device *rdev);
+void r100_wb_fini(struct radeon_device *rdev);
+int r100_wb_init(struct radeon_device *rdev);
+void r100_hdp_reset(struct radeon_device *rdev);
+int r100_rb2d_reset(struct radeon_device *rdev);
+int r100_cp_reset(struct radeon_device *rdev);
+void r100_vga_render_disable(struct radeon_device *rdev);
+int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
+					 struct radeon_cs_packet *pkt,
+					 struct radeon_bo *robj);
+int r100_cs_parse_packet0(struct radeon_cs_parser *p,
+			  struct radeon_cs_packet *pkt,
+			  const unsigned *auth, unsigned n,
+			  radeon_packet0_check_t check);
+int r100_cs_packet_parse(struct radeon_cs_parser *p,
+			 struct radeon_cs_packet *pkt,
+			 unsigned idx);
+void r100_enable_bm(struct radeon_device *rdev);
+void r100_set_common_regs(struct radeon_device *rdev);
 
 /*
  * r200,rv250,rs300,rv280
@@ -129,43 +135,6 @@ extern int r200_copy_dma(struct radeon_device *rdev,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
-static struct radeon_asic r200_asic = {
-	.init = &r100_init,
-	.fini = &r100_fini,
-	.suspend = &r100_suspend,
-	.resume = &r100_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r100_gpu_reset,
-	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
-	.gart_set_page = &r100_pci_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r100_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &r100_irq_set,
-	.irq_process = &r100_irq_process,
-	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r100_fence_ring_emit,
-	.cs_parse = &r100_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
 
 /*
  * r300,r350,rv350,rv380
@@ -186,82 +155,6 @@ extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v
 extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
 extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
 
-static struct radeon_asic r300_asic = {
-	.init = &r300_init,
-	.fini = &r300_fini,
-	.suspend = &r300_suspend,
-	.resume = &r300_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
-	.gart_set_page = &r100_pci_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &r100_irq_set,
-	.irq_process = &r100_irq_process,
-	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
-
-static struct radeon_asic r300_asic_pcie = {
-	.init = &r300_init,
-	.fini = &r300_fini,
-	.suspend = &r300_suspend,
-	.resume = &r300_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-	.gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &r100_irq_set,
-	.irq_process = &r100_irq_process,
-	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
 /*
  * r420,r423,rv410
  */
@@ -269,44 +162,6 @@ extern int r420_init(struct radeon_device *rdev);
 extern void r420_fini(struct radeon_device *rdev);
 extern int r420_suspend(struct radeon_device *rdev);
 extern int r420_resume(struct radeon_device *rdev);
-static struct radeon_asic r420_asic = {
-	.init = &r420_init,
-	.fini = &r420_fini,
-	.suspend = &r420_suspend,
-	.resume = &r420_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-	.gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &r100_irq_set,
-	.irq_process = &r100_irq_process,
-	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
 
 /*
  * rs400,rs480
@@ -319,44 +174,6 @@ void rs400_gart_tlb_flush(struct radeon_device *rdev);
 int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
 uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
-static struct radeon_asic rs400_asic = {
-	.init = &rs400_init,
-	.fini = &rs400_fini,
-	.suspend = &rs400_suspend,
-	.resume = &rs400_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rs400_gart_tlb_flush,
-	.gart_set_page = &rs400_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &r100_irq_set,
-	.irq_process = &r100_irq_process,
-	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
 
 /*
  * rs600.
@@ -379,45 +196,6 @@ bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);
 
-static struct radeon_asic rs600_asic = {
-	.init = &rs600_init,
-	.fini = &rs600_fini,
-	.suspend = &rs600_suspend,
-	.resume = &rs600_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rs600_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &rs600_irq_set,
-	.irq_process = &rs600_irq_process,
-	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rs600_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
-
 /*
  * rs690,rs740
  */
@@ -428,44 +206,6 @@ int rs690_suspend(struct radeon_device *rdev);
 uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rs690_bandwidth_update(struct radeon_device *rdev);
-static struct radeon_asic rs690_asic = {
-	.init = &rs690_init,
-	.fini = &rs690_fini,
-	.suspend = &rs690_suspend,
-	.resume = &rs690_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rs400_gart_tlb_flush,
-	.gart_set_page = &rs400_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &rs600_irq_set,
-	.irq_process = &rs600_irq_process,
-	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r200_copy_dma,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rs690_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
 
 /*
  * rv515
@@ -481,87 +221,12 @@ void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rv515_bandwidth_update(struct radeon_device *rdev);
 int rv515_resume(struct radeon_device *rdev);
 int rv515_suspend(struct radeon_device *rdev);
-static struct radeon_asic rv515_asic = {
-	.init = &rv515_init,
-	.fini = &rv515_fini,
-	.suspend = &rv515_suspend,
-	.resume = &rv515_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &rv515_gpu_reset,
-	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-	.gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &rv515_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &rs600_irq_set,
-	.irq_process = &rs600_irq_process,
-	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
 
 /*
  * r520,rv530,rv560,rv570,r580
  */
 int r520_init(struct radeon_device *rdev);
 int r520_resume(struct radeon_device *rdev);
-static struct radeon_asic r520_asic = {
-	.init = &r520_init,
-	.fini = &rv515_fini,
-	.suspend = &rv515_suspend,
-	.resume = &r520_resume,
-	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &rv515_gpu_reset,
-	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-	.gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &rv515_ring_start,
-	.ring_test = &r100_ring_test,
-	.ring_ib_execute = &r100_ring_ib_execute,
-	.irq_set = &rs600_irq_set,
-	.irq_process = &rs600_irq_process,
-	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-	.cs_parse = &r300_cs_parse,
-	.copy_blit = &r100_copy_blit,
-	.copy_dma = &r200_copy_dma,
-	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
 
 /*
  * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
@@ -591,7 +256,7 @@ int r600_gpu_reset(struct radeon_device *rdev);
 int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
-int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
+void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
 int r600_ring_test(struct radeon_device *rdev);
 int r600_copy_blit(struct radeon_device *rdev,
@@ -604,43 +269,6 @@ void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
 extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
 
-static struct radeon_asic r600_asic = {
-	.init = &r600_init,
-	.fini = &r600_fini,
-	.suspend = &r600_suspend,
-	.resume = &r600_resume,
-	.cp_commit = &r600_cp_commit,
-	.vga_set_state = &r600_vga_set_state,
-	.gpu_reset = &r600_gpu_reset,
-	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.ring_test = &r600_ring_test,
-	.ring_ib_execute = &r600_ring_ib_execute,
-	.irq_set = &r600_irq_set,
-	.irq_process = &r600_irq_process,
-	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r600_fence_ring_emit,
-	.cs_parse = &r600_cs_parse,
-	.copy_blit = &r600_copy_blit,
-	.copy_dma = &r600_copy_blit,
-	.copy = &r600_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = NULL,
-	.set_surface_reg = r600_set_surface_reg,
-	.clear_surface_reg = r600_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &r600_hpd_init,
-	.hpd_fini = &r600_hpd_fini,
-	.hpd_sense = &r600_hpd_sense,
-	.hpd_set_polarity = &r600_hpd_set_polarity,
-	.ioctl_wait_idle = r600_ioctl_wait_idle,
-};
-
 /*
  * rv770,rv730,rv710,rv740
  */
@@ -650,43 +278,6 @@ int rv770_suspend(struct radeon_device *rdev);
 int rv770_resume(struct radeon_device *rdev);
 int rv770_gpu_reset(struct radeon_device *rdev);
 
-static struct radeon_asic rv770_asic = {
-	.init = &rv770_init,
-	.fini = &rv770_fini,
-	.suspend = &rv770_suspend,
-	.resume = &rv770_resume,
-	.cp_commit = &r600_cp_commit,
-	.gpu_reset = &rv770_gpu_reset,
-	.vga_set_state = &r600_vga_set_state,
-	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.ring_test = &r600_ring_test,
-	.ring_ib_execute = &r600_ring_ib_execute,
-	.irq_set = &r600_irq_set,
-	.irq_process = &r600_irq_process,
-	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r600_fence_ring_emit,
-	.cs_parse = &r600_cs_parse,
-	.copy_blit = &r600_copy_blit,
-	.copy_dma = &r600_copy_blit,
-	.copy = &r600_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r600_set_surface_reg,
-	.clear_surface_reg = r600_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &r600_hpd_init,
-	.hpd_fini = &r600_hpd_fini,
-	.hpd_sense = &r600_hpd_sense,
-	.hpd_set_polarity = &r600_hpd_set_polarity,
-	.ioctl_wait_idle = r600_ioctl_wait_idle,
-};
-
 /*
  * evergreen
  */
@@ -701,40 +292,4 @@ void evergreen_hpd_fini(struct radeon_device *rdev);
 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);
-
-static struct radeon_asic evergreen_asic = {
-	.init = &evergreen_init,
-	.fini = &evergreen_fini,
-	.suspend = &evergreen_suspend,
-	.resume = &evergreen_resume,
-	.cp_commit = NULL,
-	.gpu_reset = &evergreen_gpu_reset,
-	.vga_set_state = &r600_vga_set_state,
-	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.ring_test = NULL,
-	.ring_ib_execute = NULL,
-	.irq_set = NULL,
-	.irq_process = NULL,
-	.get_vblank_counter = NULL,
-	.fence_ring_emit = NULL,
-	.cs_parse = NULL,
-	.copy_blit = NULL,
-	.copy_dma = NULL,
-	.copy = NULL,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = NULL,
-	.set_surface_reg = r600_set_surface_reg,
-	.clear_surface_reg = r600_clear_surface_reg,
-	.bandwidth_update = &evergreen_bandwidth_update,
-	.hpd_init = &evergreen_hpd_init,
-	.hpd_fini = &evergreen_hpd_fini,
-	.hpd_sense = &evergreen_hpd_sense,
-	.hpd_set_polarity = &evergreen_hpd_set_polarity,
-};
-
 #endif
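
One functional addition worth noting is `struct r100_mc_save` together with the `r100_mc_stop()` and `r100_mc_resume()` prototypes: the struct carries the CRTC and cursor state named in its fields so a caller can quiesce scanout while the memory controller is reprogrammed. A hedged usage sketch follows; it is not taken from this patch, and the helper name and the reprogramming step are illustrative only.

/* Illustrative caller, assuming the save/program/restore pattern implied
 * by the prototypes above; example_remap_mc() is a made-up name and the
 * snippet presumes the usual radeon.h / radeon_asic.h context.
 */
static void example_remap_mc(struct radeon_device *rdev)
{
	struct r100_mc_save save;

	r100_mc_stop(rdev, &save);	/* capture GENMO_WT, CRTC*_GEN_CNTL, CUR*_OFFSET */
	/* ... reprogram the memory controller aperture here ... */
	r100_mc_resume(rdev, &save);	/* restore the saved display state */
}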