Diffstat (limited to 'drivers/gpu/drm/radeon/evergreen.c')
 -rw-r--r--  drivers/gpu/drm/radeon/evergreen.c | 767
 1 file changed, 767 insertions, 0 deletions
diff --git a/drivers/gpu/drm/radeon/evergreen.c b/drivers/gpu/drm/radeon/evergreen.c
new file mode 100644
index 000000000000..bd2e7aa85c1d
--- /dev/null
+++ b/drivers/gpu/drm/radeon/evergreen.c
@@ -0,0 +1,767 @@
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_drm.h"
#include "rv770d.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
        bool connected = false;
        /* XXX */
        return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                                enum radeon_hpd_id hpd)
{
        /* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
        /* XXX */
}


void evergreen_bandwidth_update(struct radeon_device *rdev)
{
        /* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
        /* XXX */
}

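/* Poll SRBM_STATUS until the bits under mask 0x1F00 (presumably the
 * memory-controller busy flags) clear, for up to rdev->usec_timeout
 * microseconds.  Returns 0 once the MC is idle, -1 on timeout.
 */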
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read MC_STATUS */
                tmp = RREG32(SRBM_STATUS) & 0x1F00;
                if (!tmp)
                        return 0;
                udelay(1);
        }
        return -1;
}

/*
 * GART
 */
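/* Bring up the PCIE GART: enable the L2 cache and the L1 TLBs, point VM
 * context 0 at the page table kept in VRAM, and flush the TLB.  Unmapped
 * accesses appear to be redirected to the dummy page rather than faulting.
 */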
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r, i;

        if (rdev->gart.table.vram.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        radeon_gart_restore(rdev);
        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
               ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
              SYSTEM_ACCESS_MODE_NOT_IN_SYS |
              SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
              EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
               RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
               (u32)(rdev->dummy_page.addr >> 12));
        for (i = 1; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        r600_pcie_gart_tlb_flush(rdev);
        rdev->gart.ready = true;
        return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;
        int i, r;

        /* Disable all tables */
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        if (rdev->gart.table.vram.robj) {
                r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
                if (likely(r == 0)) {
                        radeon_bo_kunmap(rdev->gart.table.vram.robj);
                        radeon_bo_unpin(rdev->gart.table.vram.robj);
                        radeon_bo_unreserve(rdev->gart.table.vram.robj);
                }
        }
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
        evergreen_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
        radeon_gart_fini(rdev);
}


void evergreen_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int i;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
               ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
              SYSTEM_ACCESS_MODE_NOT_IN_SYS |
              SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
              EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

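/* evergreen_mc_stop()/evergreen_mc_resume() bracket memory-controller
 * reprogramming: save the VGA and CRTC state and blank all six display
 * controllers so nothing scans out of VRAM while the apertures move,
 * then restore everything afterwards.
 */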
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        save->vga_control[0] = RREG32(D1VGA_CONTROL);
        save->vga_control[1] = RREG32(D2VGA_CONTROL);
        save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
        save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
        save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
        save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
        save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
        save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
        save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
        save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
        save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
        save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
        save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
        save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

        /* Stop all video */
        WREG32(VGA_RENDER_CONTROL, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        WREG32(D1VGA_CONTROL, 0);
        WREG32(D2VGA_CONTROL, 0);
        WREG32(EVERGREEN_D3VGA_CONTROL, 0);
        WREG32(EVERGREEN_D4VGA_CONTROL, 0);
        WREG32(EVERGREEN_D5VGA_CONTROL, 0);
        WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
        /* Unlock host access */
        WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
        mdelay(1);
        /* Restore video state */
        WREG32(D1VGA_CONTROL, save->vga_control[0]);
        WREG32(D2VGA_CONTROL, save->vga_control[1]);
        WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
        WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
        WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
        WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

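/* Reprogram the memory controller: clear a bank of HDP registers, stop the
 * displays, move the system aperture and MC_VM_FB_LOCATION to match the
 * driver's idea of where VRAM (and, on AGP, the GART window) lives, then
 * bring the displays back and disable the VGA renderer.
 */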
static void evergreen_mc_program(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Lockout access through VGA aperture */
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                               rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                               rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                               rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                               rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                       rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                       rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7));
        WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        evergreen_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it overwriting our objects */
        rv515_vga_render_disable(rdev);
}

#if 0
/*
 * CP.
 */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
        /* XXX */
}


static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
        /* XXX */

        return 0;
}


/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
                                                  u32 num_backends,
                                                  u32 backend_disable_mask)
{
        u32 backend_map = 0;

        return backend_map;
}
#endif

static void evergreen_gpu_init(struct radeon_device *rdev)
{
        /* XXX */
}

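/* Derive the VRAM configuration: channel width comes from MC_ARB_RAMCFG,
 * channel count from MC_SHARED_CHMAP, and the total size from
 * CONFIG_MEMSIZE (reported in MB on this family).  VRAM larger than the
 * PCI aperture is clamped until unmappable VRAM is supported.
 */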
int evergreen_mc_init(struct radeon_device *rdev)
{
        fixed20_12 a;
        u32 tmp;
        int chansize, numchan;

        /* Get VRAM information */
        rdev->mc.vram_is_ddr = true;
        tmp = RREG32(MC_ARB_RAMCFG);
        if (tmp & CHANSIZE_OVERRIDE) {
                chansize = 16;
        } else if (tmp & CHANSIZE_MASK) {
                chansize = 64;
        } else {
                chansize = 32;
        }
        tmp = RREG32(MC_SHARED_CHMAP);
        switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
        case 0:
        default:
                numchan = 1;
                break;
        case 1:
                numchan = 2;
                break;
        case 2:
                numchan = 4;
                break;
        case 3:
                numchan = 8;
                break;
        }
        rdev->mc.vram_width = numchan * chansize;
        /* Could aper size report 0? */
        rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
        rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
        /* Setup GPU memory space */
        /* size in MB on evergreen */
        rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        rdev->mc.visible_vram_size = rdev->mc.aper_size;
        /* FIXME remove this once we support unmappable VRAM */
        if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
                rdev->mc.mc_vram_size = rdev->mc.aper_size;
                rdev->mc.real_vram_size = rdev->mc.aper_size;
        }
        r600_vram_gtt_location(rdev, &rdev->mc);
        /* FIXME: we should enforce default clock in case GPU is not in
         * default setup
         */
        a.full = rfixed_const(100);
        rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
        rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
        return 0;
}

int evergreen_gpu_reset(struct radeon_device *rdev)
{
        /* FIXME: implement for evergreen */
        return 0;
}

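/* Bring-up path: only the MC reprogramming and the (stubbed) core init are
 * live at this point; firmware loading, GART/AGP enablement, the blitter,
 * IRQs and the CP remain fenced off under "#if 0" until evergreen
 * acceleration support is implemented.
 */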
static int evergreen_startup(struct radeon_device *rdev)
{
#if 0
        int r;

        if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
                r = r600_init_microcode(rdev);
                if (r) {
                        DRM_ERROR("Failed to load firmware!\n");
                        return r;
                }
        }
#endif
        evergreen_mc_program(rdev);
#if 0
        if (rdev->flags & RADEON_IS_AGP) {
                evergreen_agp_enable(rdev);
        } else {
                r = evergreen_pcie_gart_enable(rdev);
                if (r)
                        return r;
        }
#endif
        evergreen_gpu_init(rdev);
#if 0
        if (!rdev->r600_blit.shader_obj) {
                r = r600_blit_init(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed blitter (%d).\n", r);
                        return r;
                }
        }

        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (unlikely(r != 0))
                return r;
        r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
                          &rdev->r600_blit.shader_gpu_addr);
        radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        if (r) {
                DRM_ERROR("failed to pin blit object %d\n", r);
                return r;
        }

        /* Enable IRQ */
        r = r600_irq_init(rdev);
        if (r) {
                DRM_ERROR("radeon: IH init failed (%d).\n", r);
                radeon_irq_kms_fini(rdev);
                return r;
        }
        r600_irq_set(rdev);

        r = radeon_ring_init(rdev, rdev->cp.ring_size);
        if (r)
                return r;
        r = evergreen_cp_load_microcode(rdev);
        if (r)
                return r;
        r = r600_cp_resume(rdev);
        if (r)
                return r;
        /* write back buffers are not vital so don't worry about failure */
        r600_wb_enable(rdev);
#endif
        return 0;
}

int evergreen_resume(struct radeon_device *rdev)
{
        int r;

        /* Do not reset the GPU before posting; on this hw, unlike on r500 hw,
         * posting will perform the necessary tasks to bring the GPU back into
         * good shape.
         */
        /* post card */
        atom_asic_init(rdev->mode_info.atom_context);
        /* Initialize clocks */
        r = radeon_clocks_init(rdev);
        if (r) {
                return r;
        }

        r = evergreen_startup(rdev);
        if (r) {
                DRM_ERROR("evergreen startup failed on resume\n");
                return r;
        }
#if 0
        r = r600_ib_test(rdev);
        if (r) {
                DRM_ERROR("radeon: failed testing IB (%d).\n", r);
                return r;
        }
#endif
        return r;
}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
        int r;

        /* FIXME: we should wait for ring to be empty */
        r700_cp_stop(rdev);
        rdev->cp.ready = false;
        r600_wb_disable(rdev);
        evergreen_pcie_gart_disable(rdev);
        /* unpin shader bo */
        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (likely(r == 0)) {
                radeon_bo_unpin(rdev->r600_blit.shader_obj);
                radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        }
#endif
        return 0;
}

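/* A card counts as POSTed if any CRTC has its master enable set, or, with
 * all CRTCs off, if the BIOS has already programmed CONFIG_MEMSIZE.
 */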
static bool evergreen_card_posted(struct radeon_device *rdev)
{
        u32 reg;

        /* first check CRTCs */
        reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
        if (reg & EVERGREEN_CRTC_MASTER_EN)
                return true;

        /* then check MEM_SIZE, in case the crtcs are off */
        if (RREG32(CONFIG_MEMSIZE))
                return true;

        return false;
}

/* The plan is to move initialization into this function and use helper
 * functions so that radeon_device_init does little more than call the
 * asic-specific functions.  This should also allow us to remove a bunch
 * of callbacks such as vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
        int r;

        r = radeon_dummy_page_init(rdev);
        if (r)
                return r;
        /* This doesn't do much */
        r = radeon_gem_init(rdev);
        if (r)
                return r;
        /* Read BIOS */
        if (!radeon_get_bios(rdev)) {
                if (ASIC_IS_AVIVO(rdev))
                        return -EINVAL;
        }
        /* Must be an ATOMBIOS */
        if (!rdev->is_atom_bios) {
                dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
                return -EINVAL;
        }
        r = radeon_atombios_init(rdev);
        if (r)
                return r;
        /* Post card if necessary */
        if (!evergreen_card_posted(rdev)) {
                if (!rdev->bios) {
                        dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
                        return -EINVAL;
                }
                DRM_INFO("GPU not posted. posting now...\n");
                atom_asic_init(rdev->mode_info.atom_context);
        }
        /* Initialize scratch registers */
        r600_scratch_init(rdev);
        /* Initialize surface registers */
        radeon_surface_init(rdev);
        /* Initialize clocks */
        radeon_get_clock_info(rdev->ddev);
        r = radeon_clocks_init(rdev);
        if (r)
                return r;
        /* Initialize power management */
        radeon_pm_init(rdev);
        /* Fence driver */
        r = radeon_fence_driver_init(rdev);
        if (r)
                return r;
        /* initialize AGP */
        if (rdev->flags & RADEON_IS_AGP) {
                r = radeon_agp_init(rdev);
                if (r)
                        radeon_agp_disable(rdev);
        }
        /* initialize memory controller */
        r = evergreen_mc_init(rdev);
        if (r)
                return r;
        /* Memory manager */
        r = radeon_bo_init(rdev);
        if (r)
                return r;
#if 0
        r = radeon_irq_kms_init(rdev);
        if (r)
                return r;

        rdev->cp.ring_obj = NULL;
        r600_ring_init(rdev, 1024 * 1024);

        rdev->ih.ring_obj = NULL;
        r600_ih_ring_init(rdev, 64 * 1024);

        r = r600_pcie_gart_init(rdev);
        if (r)
                return r;
#endif
        rdev->accel_working = false;
        r = evergreen_startup(rdev);
        if (r) {
                evergreen_suspend(rdev);
                /*r600_wb_fini(rdev);*/
                /*radeon_ring_fini(rdev);*/
                /*evergreen_pcie_gart_fini(rdev);*/
                rdev->accel_working = false;
        }
        if (rdev->accel_working) {
                r = radeon_ib_pool_init(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
                        rdev->accel_working = false;
                }
                r = r600_ib_test(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed testing IB (%d).\n", r);
                        rdev->accel_working = false;
                }
        }
        return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
        evergreen_suspend(rdev);
#if 0
        r600_blit_fini(rdev);
        r600_irq_fini(rdev);
        radeon_irq_kms_fini(rdev);
        radeon_ring_fini(rdev);
        r600_wb_fini(rdev);
        evergreen_pcie_gart_fini(rdev);
#endif
        radeon_gem_fini(rdev);
        radeon_fence_driver_fini(rdev);
        radeon_clocks_fini(rdev);
        radeon_agp_fini(rdev);
        radeon_bo_fini(rdev);
        radeon_atombios_fini(rdev);
        kfree(rdev->bios);
        rdev->bios = NULL;
        radeon_dummy_page_fini(rdev);
}