Diffstat (limited to 'drivers/gpu/drm/radeon/radeon_asic.h')
-rw-r--r--  drivers/gpu/drm/radeon/radeon_asic.h  415
1 file changed, 91 insertions(+), 324 deletions(-)
diff --git a/drivers/gpu/drm/radeon/radeon_asic.h b/drivers/gpu/drm/radeon/radeon_asic.h
index c18fbee387d7..a0b8280663d1 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.h
+++ b/drivers/gpu/drm/radeon/radeon_asic.h
@@ -33,6 +33,7 @@
  */
 uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
 void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
+uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
 void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
 
 uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
@@ -42,12 +43,20 @@ void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock
 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
 
 /*
- * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
+ * r100,rv100,rs100,rv200,rs200
  */
-extern int r100_init(struct radeon_device *rdev);
-extern void r100_fini(struct radeon_device *rdev);
-extern int r100_suspend(struct radeon_device *rdev);
-extern int r100_resume(struct radeon_device *rdev);
+struct r100_mc_save {
+        u32 GENMO_WT;
+        u32 CRTC_EXT_CNTL;
+        u32 CRTC_GEN_CNTL;
+        u32 CRTC2_GEN_CNTL;
+        u32 CUR_OFFSET;
+        u32 CUR2_OFFSET;
+};
+int r100_init(struct radeon_device *rdev);
+void r100_fini(struct radeon_device *rdev);
+int r100_suspend(struct radeon_device *rdev);
+int r100_resume(struct radeon_device *rdev);
 uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
 void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void r100_vga_set_state(struct radeon_device *rdev, bool state);
@@ -72,43 +81,60 @@ int r100_copy_blit(struct radeon_device *rdev,
 int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                          uint32_t tiling_flags, uint32_t pitch,
                          uint32_t offset, uint32_t obj_size);
-int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
+void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
 void r100_bandwidth_update(struct radeon_device *rdev);
 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
 int r100_ring_test(struct radeon_device *rdev);
+void r100_hpd_init(struct radeon_device *rdev);
+void r100_hpd_fini(struct radeon_device *rdev);
+bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
+void r100_hpd_set_polarity(struct radeon_device *rdev,
+                           enum radeon_hpd_id hpd);
+int r100_debugfs_rbbm_init(struct radeon_device *rdev);
+int r100_debugfs_cp_init(struct radeon_device *rdev);
+void r100_cp_disable(struct radeon_device *rdev);
+int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
+void r100_cp_fini(struct radeon_device *rdev);
+int r100_pci_gart_init(struct radeon_device *rdev);
+void r100_pci_gart_fini(struct radeon_device *rdev);
+int r100_pci_gart_enable(struct radeon_device *rdev);
+void r100_pci_gart_disable(struct radeon_device *rdev);
+int r100_debugfs_mc_info_init(struct radeon_device *rdev);
+int r100_gui_wait_for_idle(struct radeon_device *rdev);
+void r100_ib_fini(struct radeon_device *rdev);
+int r100_ib_init(struct radeon_device *rdev);
+void r100_irq_disable(struct radeon_device *rdev);
+void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
+void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
+void r100_vram_init_sizes(struct radeon_device *rdev);
+void r100_wb_disable(struct radeon_device *rdev);
+void r100_wb_fini(struct radeon_device *rdev);
+int r100_wb_init(struct radeon_device *rdev);
+void r100_hdp_reset(struct radeon_device *rdev);
+int r100_rb2d_reset(struct radeon_device *rdev);
+int r100_cp_reset(struct radeon_device *rdev);
+void r100_vga_render_disable(struct radeon_device *rdev);
+int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
+                                         struct radeon_cs_packet *pkt,
+                                         struct radeon_bo *robj);
+int r100_cs_parse_packet0(struct radeon_cs_parser *p,
+                          struct radeon_cs_packet *pkt,
+                          const unsigned *auth, unsigned n,
+                          radeon_packet0_check_t check);
+int r100_cs_packet_parse(struct radeon_cs_parser *p,
+                         struct radeon_cs_packet *pkt,
+                         unsigned idx);
+void r100_enable_bm(struct radeon_device *rdev);
+void r100_set_common_regs(struct radeon_device *rdev);
 
-static struct radeon_asic r100_asic = {
-        .init = &r100_init,
-        .fini = &r100_fini,
-        .suspend = &r100_suspend,
-        .resume = &r100_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &r100_gpu_reset,
-        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
-        .gart_set_page = &r100_pci_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &r100_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &r100_irq_set,
-        .irq_process = &r100_irq_process,
-        .get_vblank_counter = &r100_get_vblank_counter,
-        .fence_ring_emit = &r100_fence_ring_emit,
-        .cs_parse = &r100_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = NULL,
-        .copy = &r100_copy_blit,
-        .get_engine_clock = &radeon_legacy_get_engine_clock,
-        .set_engine_clock = &radeon_legacy_set_engine_clock,
-        .get_memory_clock = NULL,
-        .set_memory_clock = NULL,
-        .set_pcie_lanes = NULL,
-        .set_clock_gating = &radeon_legacy_set_clock_gating,
-        .set_surface_reg = r100_set_surface_reg,
-        .clear_surface_reg = r100_clear_surface_reg,
-        .bandwidth_update = &r100_bandwidth_update,
-};
-
+/*
+ * r200,rv250,rs300,rv280
+ */
+extern int r200_copy_dma(struct radeon_device *rdev,
+                         uint64_t src_offset,
+                         uint64_t dst_offset,
+                         unsigned num_pages,
+                         struct radeon_fence *fence);
 
 /*
  * r300,r350,rv350,rv380
@@ -127,42 +153,7 @@ extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t
 extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
 extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
-extern int r300_copy_dma(struct radeon_device *rdev,
-                         uint64_t src_offset,
-                         uint64_t dst_offset,
-                         unsigned num_pages,
-                         struct radeon_fence *fence);
-static struct radeon_asic r300_asic = {
-        .init = &r300_init,
-        .fini = &r300_fini,
-        .suspend = &r300_suspend,
-        .resume = &r300_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &r300_gpu_reset,
-        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
-        .gart_set_page = &r100_pci_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &r300_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &r100_irq_set,
-        .irq_process = &r100_irq_process,
-        .get_vblank_counter = &r100_get_vblank_counter,
-        .fence_ring_emit = &r300_fence_ring_emit,
-        .cs_parse = &r300_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = &r300_copy_dma,
-        .copy = &r100_copy_blit,
-        .get_engine_clock = &radeon_legacy_get_engine_clock,
-        .set_engine_clock = &radeon_legacy_set_engine_clock,
-        .get_memory_clock = NULL,
-        .set_memory_clock = NULL,
-        .set_pcie_lanes = &rv370_set_pcie_lanes,
-        .set_clock_gating = &radeon_legacy_set_clock_gating,
-        .set_surface_reg = r100_set_surface_reg,
-        .clear_surface_reg = r100_clear_surface_reg,
-        .bandwidth_update = &r100_bandwidth_update,
-};
+extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
 
 /*
  * r420,r423,rv410
@@ -171,38 +162,6 @@ extern int r420_init(struct radeon_device *rdev);
 extern void r420_fini(struct radeon_device *rdev);
 extern int r420_suspend(struct radeon_device *rdev);
 extern int r420_resume(struct radeon_device *rdev);
-static struct radeon_asic r420_asic = {
-        .init = &r420_init,
-        .fini = &r420_fini,
-        .suspend = &r420_suspend,
-        .resume = &r420_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &r300_gpu_reset,
-        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-        .gart_set_page = &rv370_pcie_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &r300_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &r100_irq_set,
-        .irq_process = &r100_irq_process,
-        .get_vblank_counter = &r100_get_vblank_counter,
-        .fence_ring_emit = &r300_fence_ring_emit,
-        .cs_parse = &r300_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = &r300_copy_dma,
-        .copy = &r100_copy_blit,
-        .get_engine_clock = &radeon_atom_get_engine_clock,
-        .set_engine_clock = &radeon_atom_set_engine_clock,
-        .get_memory_clock = &radeon_atom_get_memory_clock,
-        .set_memory_clock = &radeon_atom_set_memory_clock,
-        .set_pcie_lanes = &rv370_set_pcie_lanes,
-        .set_clock_gating = &radeon_atom_set_clock_gating,
-        .set_surface_reg = r100_set_surface_reg,
-        .clear_surface_reg = r100_clear_surface_reg,
-        .bandwidth_update = &r100_bandwidth_update,
-};
-
 
 /*
  * rs400,rs480
@@ -215,38 +174,6 @@ void rs400_gart_tlb_flush(struct radeon_device *rdev);
 int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
 uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
-static struct radeon_asic rs400_asic = {
-        .init = &rs400_init,
-        .fini = &rs400_fini,
-        .suspend = &rs400_suspend,
-        .resume = &rs400_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &r300_gpu_reset,
-        .gart_tlb_flush = &rs400_gart_tlb_flush,
-        .gart_set_page = &rs400_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &r300_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &r100_irq_set,
-        .irq_process = &r100_irq_process,
-        .get_vblank_counter = &r100_get_vblank_counter,
-        .fence_ring_emit = &r300_fence_ring_emit,
-        .cs_parse = &r300_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = &r300_copy_dma,
-        .copy = &r100_copy_blit,
-        .get_engine_clock = &radeon_legacy_get_engine_clock,
-        .set_engine_clock = &radeon_legacy_set_engine_clock,
-        .get_memory_clock = NULL,
-        .set_memory_clock = NULL,
-        .set_pcie_lanes = NULL,
-        .set_clock_gating = &radeon_legacy_set_clock_gating,
-        .set_surface_reg = r100_set_surface_reg,
-        .clear_surface_reg = r100_clear_surface_reg,
-        .bandwidth_update = &r100_bandwidth_update,
-};
-
 
 /*
  * rs600.
@@ -263,36 +190,11 @@ int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
 uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rs600_bandwidth_update(struct radeon_device *rdev);
-static struct radeon_asic rs600_asic = {
-        .init = &rs600_init,
-        .fini = &rs600_fini,
-        .suspend = &rs600_suspend,
-        .resume = &rs600_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &r300_gpu_reset,
-        .gart_tlb_flush = &rs600_gart_tlb_flush,
-        .gart_set_page = &rs600_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &r300_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &rs600_irq_set,
-        .irq_process = &rs600_irq_process,
-        .get_vblank_counter = &rs600_get_vblank_counter,
-        .fence_ring_emit = &r300_fence_ring_emit,
-        .cs_parse = &r300_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = &r300_copy_dma,
-        .copy = &r100_copy_blit,
-        .get_engine_clock = &radeon_atom_get_engine_clock,
-        .set_engine_clock = &radeon_atom_set_engine_clock,
-        .get_memory_clock = &radeon_atom_get_memory_clock,
-        .set_memory_clock = &radeon_atom_set_memory_clock,
-        .set_pcie_lanes = NULL,
-        .set_clock_gating = &radeon_atom_set_clock_gating,
-        .bandwidth_update = &rs600_bandwidth_update,
-};
-
+void rs600_hpd_init(struct radeon_device *rdev);
+void rs600_hpd_fini(struct radeon_device *rdev);
+bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
+void rs600_hpd_set_polarity(struct radeon_device *rdev,
+                            enum radeon_hpd_id hpd);
 
 /*
  * rs690,rs740
@@ -304,38 +206,6 @@ int rs690_suspend(struct radeon_device *rdev);
 uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rs690_bandwidth_update(struct radeon_device *rdev);
-static struct radeon_asic rs690_asic = {
-        .init = &rs690_init,
-        .fini = &rs690_fini,
-        .suspend = &rs690_suspend,
-        .resume = &rs690_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &r300_gpu_reset,
-        .gart_tlb_flush = &rs400_gart_tlb_flush,
-        .gart_set_page = &rs400_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &r300_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &rs600_irq_set,
-        .irq_process = &rs600_irq_process,
-        .get_vblank_counter = &rs600_get_vblank_counter,
-        .fence_ring_emit = &r300_fence_ring_emit,
-        .cs_parse = &r300_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = &r300_copy_dma,
-        .copy = &r300_copy_dma,
-        .get_engine_clock = &radeon_atom_get_engine_clock,
-        .set_engine_clock = &radeon_atom_set_engine_clock,
-        .get_memory_clock = &radeon_atom_get_memory_clock,
-        .set_memory_clock = &radeon_atom_set_memory_clock,
-        .set_pcie_lanes = NULL,
-        .set_clock_gating = &radeon_atom_set_clock_gating,
-        .set_surface_reg = r100_set_surface_reg,
-        .clear_surface_reg = r100_clear_surface_reg,
-        .bandwidth_update = &rs690_bandwidth_update,
-};
-
 
 /*
  * rv515
@@ -351,75 +221,12 @@ void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rv515_bandwidth_update(struct radeon_device *rdev);
 int rv515_resume(struct radeon_device *rdev);
 int rv515_suspend(struct radeon_device *rdev);
-static struct radeon_asic rv515_asic = {
-        .init = &rv515_init,
-        .fini = &rv515_fini,
-        .suspend = &rv515_suspend,
-        .resume = &rv515_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &rv515_gpu_reset,
-        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-        .gart_set_page = &rv370_pcie_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &rv515_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &rs600_irq_set,
-        .irq_process = &rs600_irq_process,
-        .get_vblank_counter = &rs600_get_vblank_counter,
-        .fence_ring_emit = &r300_fence_ring_emit,
-        .cs_parse = &r300_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = &r300_copy_dma,
-        .copy = &r100_copy_blit,
-        .get_engine_clock = &radeon_atom_get_engine_clock,
-        .set_engine_clock = &radeon_atom_set_engine_clock,
-        .get_memory_clock = &radeon_atom_get_memory_clock,
-        .set_memory_clock = &radeon_atom_set_memory_clock,
-        .set_pcie_lanes = &rv370_set_pcie_lanes,
-        .set_clock_gating = &radeon_atom_set_clock_gating,
-        .set_surface_reg = r100_set_surface_reg,
-        .clear_surface_reg = r100_clear_surface_reg,
-        .bandwidth_update = &rv515_bandwidth_update,
-};
-
 
 /*
  * r520,rv530,rv560,rv570,r580
  */
 int r520_init(struct radeon_device *rdev);
 int r520_resume(struct radeon_device *rdev);
-static struct radeon_asic r520_asic = {
-        .init = &r520_init,
-        .fini = &rv515_fini,
-        .suspend = &rv515_suspend,
-        .resume = &r520_resume,
-        .vga_set_state = &r100_vga_set_state,
-        .gpu_reset = &rv515_gpu_reset,
-        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-        .gart_set_page = &rv370_pcie_gart_set_page,
-        .cp_commit = &r100_cp_commit,
-        .ring_start = &rv515_ring_start,
-        .ring_test = &r100_ring_test,
-        .ring_ib_execute = &r100_ring_ib_execute,
-        .irq_set = &rs600_irq_set,
-        .irq_process = &rs600_irq_process,
-        .get_vblank_counter = &rs600_get_vblank_counter,
-        .fence_ring_emit = &r300_fence_ring_emit,
-        .cs_parse = &r300_cs_parse,
-        .copy_blit = &r100_copy_blit,
-        .copy_dma = &r300_copy_dma,
-        .copy = &r100_copy_blit,
-        .get_engine_clock = &radeon_atom_get_engine_clock,
-        .set_engine_clock = &radeon_atom_set_engine_clock,
-        .get_memory_clock = &radeon_atom_get_memory_clock,
-        .set_memory_clock = &radeon_atom_set_memory_clock,
-        .set_pcie_lanes = &rv370_set_pcie_lanes,
-        .set_clock_gating = &radeon_atom_set_clock_gating,
-        .set_surface_reg = r100_set_surface_reg,
-        .clear_surface_reg = r100_clear_surface_reg,
-        .bandwidth_update = &rv515_bandwidth_update,
-};
 
 /*
  * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
@@ -449,42 +256,18 @@ int r600_gpu_reset(struct radeon_device *rdev);
 int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                          uint32_t tiling_flags, uint32_t pitch,
                          uint32_t offset, uint32_t obj_size);
-int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
+void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
 int r600_ring_test(struct radeon_device *rdev);
 int r600_copy_blit(struct radeon_device *rdev,
                    uint64_t src_offset, uint64_t dst_offset,
                    unsigned num_pages, struct radeon_fence *fence);
-
-static struct radeon_asic r600_asic = {
-        .init = &r600_init,
-        .fini = &r600_fini,
-        .suspend = &r600_suspend,
-        .resume = &r600_resume,
-        .cp_commit = &r600_cp_commit,
-        .vga_set_state = &r600_vga_set_state,
-        .gpu_reset = &r600_gpu_reset,
-        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-        .gart_set_page = &rs600_gart_set_page,
-        .ring_test = &r600_ring_test,
-        .ring_ib_execute = &r600_ring_ib_execute,
-        .irq_set = &r600_irq_set,
-        .irq_process = &r600_irq_process,
-        .fence_ring_emit = &r600_fence_ring_emit,
-        .cs_parse = &r600_cs_parse,
-        .copy_blit = &r600_copy_blit,
-        .copy_dma = &r600_copy_blit,
-        .copy = &r600_copy_blit,
-        .get_engine_clock = &radeon_atom_get_engine_clock,
-        .set_engine_clock = &radeon_atom_set_engine_clock,
-        .get_memory_clock = &radeon_atom_get_memory_clock,
-        .set_memory_clock = &radeon_atom_set_memory_clock,
-        .set_pcie_lanes = NULL,
-        .set_clock_gating = &radeon_atom_set_clock_gating,
-        .set_surface_reg = r600_set_surface_reg,
-        .clear_surface_reg = r600_clear_surface_reg,
-        .bandwidth_update = &rv515_bandwidth_update,
-};
+void r600_hpd_init(struct radeon_device *rdev);
+void r600_hpd_fini(struct radeon_device *rdev);
+bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
+void r600_hpd_set_polarity(struct radeon_device *rdev,
+                           enum radeon_hpd_id hpd);
+extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
 
 /*
  * rv770,rv730,rv710,rv740
@@ -495,34 +278,18 @@ int rv770_suspend(struct radeon_device *rdev);
 int rv770_resume(struct radeon_device *rdev);
 int rv770_gpu_reset(struct radeon_device *rdev);
 
-static struct radeon_asic rv770_asic = {
-        .init = &rv770_init,
-        .fini = &rv770_fini,
-        .suspend = &rv770_suspend,
-        .resume = &rv770_resume,
-        .cp_commit = &r600_cp_commit,
-        .gpu_reset = &rv770_gpu_reset,
-        .vga_set_state = &r600_vga_set_state,
-        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-        .gart_set_page = &rs600_gart_set_page,
-        .ring_test = &r600_ring_test,
-        .ring_ib_execute = &r600_ring_ib_execute,
-        .irq_set = &r600_irq_set,
-        .irq_process = &r600_irq_process,
-        .fence_ring_emit = &r600_fence_ring_emit,
-        .cs_parse = &r600_cs_parse,
-        .copy_blit = &r600_copy_blit,
-        .copy_dma = &r600_copy_blit,
-        .copy = &r600_copy_blit,
-        .get_engine_clock = &radeon_atom_get_engine_clock,
-        .set_engine_clock = &radeon_atom_set_engine_clock,
-        .get_memory_clock = &radeon_atom_get_memory_clock,
-        .set_memory_clock = &radeon_atom_set_memory_clock,
-        .set_pcie_lanes = NULL,
-        .set_clock_gating = &radeon_atom_set_clock_gating,
-        .set_surface_reg = r600_set_surface_reg,
-        .clear_surface_reg = r600_clear_surface_reg,
-        .bandwidth_update = &rv515_bandwidth_update,
-};
-
+/*
+ * evergreen
+ */
+int evergreen_init(struct radeon_device *rdev);
+void evergreen_fini(struct radeon_device *rdev);
+int evergreen_suspend(struct radeon_device *rdev);
+int evergreen_resume(struct radeon_device *rdev);
+int evergreen_gpu_reset(struct radeon_device *rdev);
+void evergreen_bandwidth_update(struct radeon_device *rdev);
+void evergreen_hpd_init(struct radeon_device *rdev);
+void evergreen_hpd_fini(struct radeon_device *rdev);
+bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
+void evergreen_hpd_set_polarity(struct radeon_device *rdev,
+                                enum radeon_hpd_id hpd);
 #endif
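
Note (illustrative, not part of the patch): the per-ASIC callback tables deleted above are presumably rebuilt elsewhere in the driver (for example in radeon_asic.c) from the prototypes this header still exports. Below is a minimal sketch of one such table under that assumption; the file location, the .hpd_* member names of struct radeon_asic, and the abbreviated entry list are inferred from the rs600_asic initializer removed here and the new rs600_hpd_* declarations, not taken from the patch itself.

/* Illustrative sketch only: assumes struct radeon_asic gained hpd_init,
 * hpd_fini, hpd_sense and hpd_set_polarity members matching the handlers
 * declared in radeon_asic.h; other entries are abbreviated from the
 * rs600_asic initializer that this patch removes from the header. */
static struct radeon_asic rs600_asic = {
        .init = &rs600_init,
        .fini = &rs600_fini,
        .suspend = &rs600_suspend,
        .resume = &rs600_resume,
        .gpu_reset = &r300_gpu_reset,
        .gart_tlb_flush = &rs600_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .bandwidth_update = &rs600_bandwidth_update,
        .hpd_init = &rs600_hpd_init,                 /* assumed member name */
        .hpd_fini = &rs600_hpd_fini,                 /* assumed member name */
        .hpd_sense = &rs600_hpd_sense,               /* assumed member name */
        .hpd_set_polarity = &rs600_hpd_set_polarity, /* assumed member name */
};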