author     Paul Mackerras <paulus@samba.org>    2006-03-27 03:15:26 -0500
committer  Paul Mackerras <paulus@samba.org>    2006-03-27 03:15:26 -0500
commit     9b781727fd1062671afa144b93e8c69b14bcac4d (patch)
tree       1cec35f0cedc664394b15165d96944019b8e1ff2 /arch/powerpc/kernel/cpu_setup_6xx.S
parent     0eb4cb9b16aba6d610a0716503b96d299b308d44 (diff)
powerpc: Move cpu_setup_6xx.S and temp.c over to arch/powerpc
Also renamed temp.c to tau_6xx.c (for thermal assist unit) and updated
the Kconfig option description and help text for CONFIG_TAU.
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'arch/powerpc/kernel/cpu_setup_6xx.S')
-rw-r--r--    arch/powerpc/kernel/cpu_setup_6xx.S    474
1 file changed, 474 insertions, 0 deletions
diff --git a/arch/powerpc/kernel/cpu_setup_6xx.S b/arch/powerpc/kernel/cpu_setup_6xx.S
new file mode 100644
index 000000000000..55ed7716636f
--- /dev/null
+++ b/arch/powerpc/kernel/cpu_setup_6xx.S
@@ -0,0 +1,474 @@
/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <linux/config.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>

_GLOBAL(__setup_cpu_603)
        b       setup_common_caches
_GLOBAL(__setup_cpu_604)
        mflr    r4
        bl      setup_common_caches
        bl      setup_604_hid0
        mtlr    r4
        blr
_GLOBAL(__setup_cpu_750)
        mflr    r4
        bl      __init_fpu_registers
        bl      setup_common_caches
        bl      setup_750_7400_hid0
        mtlr    r4
        blr
_GLOBAL(__setup_cpu_750cx)
        mflr    r4
        bl      __init_fpu_registers
        bl      setup_common_caches
        bl      setup_750_7400_hid0
        bl      setup_750cx
        mtlr    r4
        blr
_GLOBAL(__setup_cpu_750fx)
        mflr    r4
        bl      __init_fpu_registers
        bl      setup_common_caches
        bl      setup_750_7400_hid0
        bl      setup_750fx
        mtlr    r4
        blr
_GLOBAL(__setup_cpu_7400)
        mflr    r4
        bl      __init_fpu_registers
        bl      setup_7400_workarounds
        bl      setup_common_caches
        bl      setup_750_7400_hid0
        mtlr    r4
        blr
_GLOBAL(__setup_cpu_7410)
        mflr    r4
        bl      __init_fpu_registers
        bl      setup_7410_workarounds
        bl      setup_common_caches
        bl      setup_750_7400_hid0
        li      r3,0
        mtspr   SPRN_L2CR2,r3
        mtlr    r4
        blr
_GLOBAL(__setup_cpu_745x)
        mflr    r4
        bl      setup_common_caches
        bl      setup_745x_specifics
        mtlr    r4
        blr

/* Enable caches for 603's, 604, 750 & 7400 */
setup_common_caches:
        mfspr   r11,SPRN_HID0
        andi.   r0,r11,HID0_DCE
        ori     r11,r11,HID0_ICE|HID0_DCE
        ori     r8,r11,HID0_ICFI
        bne     1f                      /* don't invalidate the D-cache */
        ori     r8,r8,HID0_DCI          /* unless it wasn't enabled */
1:      sync
        mtspr   SPRN_HID0,r8            /* enable and invalidate caches */
        sync
        mtspr   SPRN_HID0,r11           /* enable caches */
        sync
        isync
        blr

/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
        mfspr   r11,SPRN_HID0
        ori     r11,r11,HID0_SIED|HID0_BHTE
        ori     r8,r11,HID0_BTCD
        sync
        mtspr   SPRN_HID0,r8            /* flush branch target address cache */
        sync                            /* on 604e/604r */
        mtspr   SPRN_HID0,r11
        sync
        isync
        blr

/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * erratas we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appear that Apple firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitely clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 */
setup_7400_workarounds:
        mfpvr   r3
        rlwinm  r3,r3,0,20,31
        cmpwi   0,r3,0x0207
        ble     1f
        blr
setup_7410_workarounds:
        mfpvr   r3
        rlwinm  r3,r3,0,20,31
        cmpwi   0,r3,0x0100
        bnelr
1:
        mfspr   r11,SPRN_MSSSR0
        /* Errata #3: Set L1OPQ_SIZE to 0x10 */
        rlwinm  r11,r11,0,9,6
        oris    r11,r11,0x0100
        /* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
        oris    r11,r11,0x0002
        /* Errata #5: Set DRLT_SIZE to 0x01 */
        rlwinm  r11,r11,0,5,2
        oris    r11,r11,0x0800
        sync
        mtspr   SPRN_MSSSR0,r11
        sync
        isync
        blr

/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Brodcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 */
setup_750_7400_hid0:
        mfspr   r11,SPRN_HID0
        ori     r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
        oris    r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
        xori    r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
        xoris   r11,r11,HID0_DPM@h      /* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
        li      r3,HID0_SPD
        andc    r11,r11,r3              /* clear SPD: enable speculative */
        li      r3,0
        mtspr   SPRN_ICTC,r3            /* Instruction Cache Throttling off */
        isync
        mtspr   SPRN_HID0,r11
        sync
        isync
        blr

/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
setup_750cx:
        mfspr   r10, SPRN_HID1
        rlwinm  r10,r10,4,28,31
        cmpwi   cr0,r10,7
        cmpwi   cr1,r10,9
        cmpwi   cr2,r10,11
        cror    4*cr0+eq,4*cr0+eq,4*cr1+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr2+eq
        bnelr
        lwz     r6,CPU_SPEC_FEATURES(r5)
        li      r7,CPU_FTR_CAN_NAP
        andc    r6,r6,r7
        stw     r6,CPU_SPEC_FEATURES(r5)
        blr

/* 750fx specific
 */
setup_750fx:
        blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 */
setup_745x_specifics:
        /* We check for the presence of an L3 cache setup by
         * the firmware. If any, we disable NAP capability as
         * it's known to be bogus on rev 2.1 and earlier
         */
        mfspr   r11,SPRN_L3CR
        andis.  r11,r11,L3CR_L3E@h
        beq     1f
        lwz     r6,CPU_SPEC_FEATURES(r5)
        andi.   r0,r6,CPU_FTR_L3_DISABLE_NAP
        beq     1f
        li      r7,CPU_FTR_CAN_NAP
        andc    r6,r6,r7
        stw     r6,CPU_SPEC_FEATURES(r5)
1:
        mfspr   r11,SPRN_HID0

        /* All of the bits we have to set.....
         */
        ori     r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
        ori     r11,r11,HID0_LRSTK | HID0_BTIC
        oris    r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
        xori    r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
        xoris   r11,r11,HID0_DPM@h      /* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

        /* All of the bits we have to clear....
         */
        li      r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
        andc    r11,r11,r3              /* clear SPD: enable speculative */
        li      r3,0

        mtspr   SPRN_ICTC,r3            /* Instruction Cache Throttling off */
        isync
        mtspr   SPRN_HID0,r11
        sync
        isync

        /* Enable L2 HW prefetch, if L2 is enabled
         */
        mfspr   r3,SPRN_L2CR
        andis.  r3,r3,L2CR_L2E@h
        beqlr
        mfspr   r3,SPRN_MSSCR0
        ori     r3,r3,3
        sync
        mtspr   SPRN_MSSCR0,r3
        sync
        isync
        blr

/*
 * Initialize the FPU registers. This is needed to work around an errata
 * in some 750 cpus where using a not yet initialized FPU register after
 * power on reset may hang the CPU
 */
_GLOBAL(__init_fpu_registers)
        mfmsr   r10
        ori     r11,r10,MSR_FP
        mtmsr   r11
        isync
        addis   r9,r3,empty_zero_page@ha
        addi    r9,r9,empty_zero_page@l
        REST_32FPRS(0,r9)
        sync
        mtmsr   r10
        isync
        blr


/* Definitions for the table use to save CPU states */
#define CS_HID0         0
#define CS_HID1         4
#define CS_HID2         8
#define CS_MSSCR0       12
#define CS_MSSSR0       16
#define CS_ICTRL        20
#define CS_LDSTCR       24
#define CS_LDSTDB       28
#define CS_SIZE         32

        .data
        .balign L1_CACHE_BYTES
cpu_state_storage:
        .space  CS_SIZE
        .balign L1_CACHE_BYTES,0
        .text

/* Called in normal context to backup CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
        /* Some CR fields are volatile, we back it up all */
        mfcr    r7

        /* Get storage ptr */
        lis     r5,cpu_state_storage@h
        ori     r5,r5,cpu_state_storage@l

        /* Save HID0 (common to all CONFIG_6xx cpus) */
        mfspr   r3,SPRN_HID0
        stw     r3,CS_HID0(r5)

        /* Now deal with CPU type dependent registers */
        mfspr   r3,SPRN_PVR
        srwi    r3,r3,16
        cmplwi  cr0,r3,0x8000   /* 7450 */
        cmplwi  cr1,r3,0x000c   /* 7400 */
        cmplwi  cr2,r3,0x800c   /* 7410 */
        cmplwi  cr3,r3,0x8001   /* 7455 */
        cmplwi  cr4,r3,0x8002   /* 7457 */
        cmplwi  cr5,r3,0x8003   /* 7447A */
        cmplwi  cr6,r3,0x7000   /* 750FX */
        cmplwi  cr7,r3,0x8004   /* 7448 */
        /* cr1 is 7400 || 7410 */
        cror    4*cr1+eq,4*cr1+eq,4*cr2+eq
        /* cr0 is 74xx */
        cror    4*cr0+eq,4*cr0+eq,4*cr3+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr4+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr1+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr5+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr7+eq
        bne     1f
        /* Backup 74xx specific regs */
        mfspr   r4,SPRN_MSSCR0
        stw     r4,CS_MSSCR0(r5)
        mfspr   r4,SPRN_MSSSR0
        stw     r4,CS_MSSSR0(r5)
        beq     cr1,1f
        /* Backup 745x specific registers */
        mfspr   r4,SPRN_HID1
        stw     r4,CS_HID1(r5)
        mfspr   r4,SPRN_ICTRL
        stw     r4,CS_ICTRL(r5)
        mfspr   r4,SPRN_LDSTCR
        stw     r4,CS_LDSTCR(r5)
        mfspr   r4,SPRN_LDSTDB
        stw     r4,CS_LDSTDB(r5)
1:
        bne     cr6,1f
        /* Backup 750FX specific registers */
        mfspr   r4,SPRN_HID1
        stw     r4,CS_HID1(r5)
        /* If rev 2.x, backup HID2 */
        mfspr   r3,SPRN_PVR
        andi.   r3,r3,0xff00
        cmpwi   cr0,r3,0x0200
        bne     1f
        mfspr   r4,SPRN_HID2
        stw     r4,CS_HID2(r5)
1:
        mtcr    r7
        blr

/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache setting
 */
_GLOBAL(__restore_cpu_setup)
        /* Some CR fields are volatile, we back it up all */
        mfcr    r7

        /* Get storage ptr */
        lis     r5,(cpu_state_storage-KERNELBASE)@h
        ori     r5,r5,cpu_state_storage@l

        /* Restore HID0 */
        lwz     r3,CS_HID0(r5)
        sync
        isync
        mtspr   SPRN_HID0,r3
        sync
        isync

        /* Now deal with CPU type dependent registers */
        mfspr   r3,SPRN_PVR
        srwi    r3,r3,16
        cmplwi  cr0,r3,0x8000   /* 7450 */
        cmplwi  cr1,r3,0x000c   /* 7400 */
        cmplwi  cr2,r3,0x800c   /* 7410 */
        cmplwi  cr3,r3,0x8001   /* 7455 */
        cmplwi  cr4,r3,0x8002   /* 7457 */
        cmplwi  cr5,r3,0x8003   /* 7447A */
        cmplwi  cr6,r3,0x7000   /* 750FX */
        cmplwi  cr7,r3,0x8004   /* 7448 */
        /* cr1 is 7400 || 7410 */
        cror    4*cr1+eq,4*cr1+eq,4*cr2+eq
        /* cr0 is 74xx */
        cror    4*cr0+eq,4*cr0+eq,4*cr3+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr4+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr1+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr5+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr7+eq
        bne     2f
        /* Restore 74xx specific regs */
        lwz     r4,CS_MSSCR0(r5)
        sync
        mtspr   SPRN_MSSCR0,r4
        sync
        isync
        lwz     r4,CS_MSSSR0(r5)
        sync
        mtspr   SPRN_MSSSR0,r4
        sync
        isync
        bne     cr2,1f
        /* Clear 7410 L2CR2 */
        li      r4,0
        mtspr   SPRN_L2CR2,r4
1:      beq     cr1,2f
        /* Restore 745x specific registers */
        lwz     r4,CS_HID1(r5)
        sync
        mtspr   SPRN_HID1,r4
        isync
        sync
        lwz     r4,CS_ICTRL(r5)
        sync
        mtspr   SPRN_ICTRL,r4
        isync
        sync
        lwz     r4,CS_LDSTCR(r5)
        sync
        mtspr   SPRN_LDSTCR,r4
        isync
        sync
        lwz     r4,CS_LDSTDB(r5)
        sync
        mtspr   SPRN_LDSTDB,r4
        isync
        sync
2:      bne     cr6,1f
        /* Restore 750FX specific registers
         * that is restore HID2 on rev 2.x and PLL config & switch
         * to PLL 0 on all
         */
        /* If rev 2.x, restore HID2 with low voltage bit cleared */
        mfspr   r3,SPRN_PVR
        andi.   r3,r3,0xff00
        cmpwi   cr0,r3,0x0200
        bne     4f
        lwz     r4,CS_HID2(r5)
        rlwinm  r4,r4,0,19,17
        mtspr   SPRN_HID2,r4
        sync
4:
        lwz     r4,CS_HID1(r5)
        rlwinm  r5,r4,0,16,14
        mtspr   SPRN_HID1,r5
        /* Wait for PLL to stabilize */
        mftbl   r5
3:      mftbl   r6
        sub     r6,r6,r5
        cmplwi  cr0,r6,10000
        ble     3b
        /* Setup final PLL */
        mtspr   SPRN_HID1,r4
1:
        mtcr    r7
        blr
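
Note on the save/restore logic above: the CS_* defines describe a flat 32-byte save area placed at cpu_state_storage, written by __save_cpu_setup and read back by __restore_cpu_setup. A rough C-level view of that layout may help when mapping the assembly back to the registers involved; this is a minimal sketch only, the struct and field names are invented here and the kernel keeps this purely in assembly.

/* Illustrative sketch: a C view of the 32-byte CPU-state save area.
 * Field offsets match the CS_* defines in cpu_setup_6xx.S; the names
 * are hypothetical and exist only for this explanation. */
struct cpu_state_save {
        unsigned int hid0;      /* CS_HID0   =  0 */
        unsigned int hid1;      /* CS_HID1   =  4 */
        unsigned int hid2;      /* CS_HID2   =  8 */
        unsigned int msscr0;    /* CS_MSSCR0 = 12 */
        unsigned int msssr0;    /* CS_MSSSR0 = 16 */
        unsigned int ictrl;     /* CS_ICTRL  = 20 */
        unsigned int ldstcr;    /* CS_LDSTCR = 24 */
        unsigned int ldstdb;    /* CS_LDSTDB = 28 */
};                              /* total size: CS_SIZE = 32 bytes */

Only the fields relevant to the detected CPU are filled in: HID0 always, MSSCR0/MSSSR0 on 74xx parts, additionally HID1/ICTRL/LDSTCR/LDSTDB on 745x, and HID1 (plus HID2 on rev 2.x) on 750FX.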