Diffstat (limited to 'arch/powerpc/kernel/head_44x.S')
-rw-r--r--  arch/powerpc/kernel/head_44x.S | 828
1 file changed, 656 insertions(+), 172 deletions(-)
diff --git a/arch/powerpc/kernel/head_44x.S b/arch/powerpc/kernel/head_44x.S
index 711368b993f2..5ab484ef06a7 100644
--- a/arch/powerpc/kernel/head_44x.S
+++ b/arch/powerpc/kernel/head_44x.S
@@ -37,6 +37,7 @@
 #include <asm/thread_info.h>
 #include <asm/ppc_asm.h>
 #include <asm/asm-offsets.h>
+#include <asm/synch.h>
 #include "head_booke.h"


@@ -69,165 +70,7 @@ _ENTRY(_start);
 	mr r27,r7
 	li r24,0 /* CPU number */

-	/*
-	 * In case the firmware didn't do it, we apply some workarounds
-	 * that are good for all 440 core variants here
-	 */
-	mfspr r3,SPRN_CCR0
-	rlwinm r3,r3,0,0,27 /* disable icache prefetch */
-	isync
-	mtspr SPRN_CCR0,r3
-	isync
-	sync
-
-	/*
-	 * Set up the initial MMU state
-	 *
-	 * We are still executing code at the virtual address
-	 * mappings set by the firmware for the base of RAM.
-	 *
-	 * We first invalidate all TLB entries but the one
-	 * we are running from. We then load the KERNELBASE
-	 * mappings so we can begin to use kernel addresses
-	 * natively and so the interrupt vector locations are
-	 * permanently pinned (necessary since Book E
-	 * implementations always have translation enabled).
-	 *
-	 * TODO: Use the known TLB entry we are running from to
-	 * determine which physical region we are located
-	 * in. This can be used to determine where in RAM
-	 * (on a shared CPU system) or PCI memory space
-	 * (on a DRAMless system) we are located.
-	 * For now, we assume a perfect world which means
-	 * we are located at the base of DRAM (physical 0).
-	 */
-
-	/*
-	 * Search TLB for entry that we are currently using.
-	 * Invalidate all entries but the one we are using.
-	 */
-	/* Load our current PID->MMUCR TID and MSR IS->MMUCR STS */
-	mfspr r3,SPRN_PID /* Get PID */
-	mfmsr r4 /* Get MSR */
-	andi. r4,r4,MSR_IS@l /* TS=1? */
-	beq wmmucr /* If not, leave STS=0 */
-	oris r3,r3,PPC44x_MMUCR_STS@h /* Set STS=1 */
-wmmucr: mtspr SPRN_MMUCR,r3 /* Put MMUCR */
-	sync
-
-	bl invstr /* Find our address */
-invstr: mflr r5 /* Make it accessible */
-	tlbsx r23,0,r5 /* Find entry we are in */
-	li r4,0 /* Start at TLB entry 0 */
-	li r3,0 /* Set PAGEID inval value */
-1: cmpw r23,r4 /* Is this our entry? */
-	beq skpinv /* If so, skip the inval */
-	tlbwe r3,r4,PPC44x_TLB_PAGEID /* If not, inval the entry */
-skpinv: addi r4,r4,1 /* Increment */
-	cmpwi r4,64 /* Are we done? */
-	bne 1b /* If not, repeat */
-	isync /* If so, context change */
-
-	/*
-	 * Configure and load pinned entry into TLB slot 63.
-	 */
-
-	lis r3,PAGE_OFFSET@h
-	ori r3,r3,PAGE_OFFSET@l
-
-	/* Kernel is at the base of RAM */
-	li r4, 0 /* Load the kernel physical address */
-
-	/* Load the kernel PID = 0 */
-	li r0,0
-	mtspr SPRN_PID,r0
-	sync
-
-	/* Initialize MMUCR */
-	li r5,0
-	mtspr SPRN_MMUCR,r5
-	sync
-
-	/* pageid fields */
-	clrrwi r3,r3,10 /* Mask off the effective page number */
-	ori r3,r3,PPC44x_TLB_VALID | PPC44x_TLB_256M
-
-	/* xlat fields */
-	clrrwi r4,r4,10 /* Mask off the real page number */
-	/* ERPN is 0 for first 4GB page */
-
-	/* attrib fields */
-	/* Added guarded bit to protect against speculative loads/stores */
-	li r5,0
-	ori r5,r5,(PPC44x_TLB_SW | PPC44x_TLB_SR | PPC44x_TLB_SX | PPC44x_TLB_G)
-
-	li r0,63 /* TLB slot 63 */
-
-	tlbwe r3,r0,PPC44x_TLB_PAGEID /* Load the pageid fields */
-	tlbwe r4,r0,PPC44x_TLB_XLAT /* Load the translation fields */
-	tlbwe r5,r0,PPC44x_TLB_ATTRIB /* Load the attrib/access fields */
-
-	/* Force context change */
-	mfmsr r0
-	mtspr SPRN_SRR1, r0
-	lis r0,3f@h
-	ori r0,r0,3f@l
-	mtspr SPRN_SRR0,r0
-	sync
-	rfi
-
-	/* If necessary, invalidate original entry we used */
-3: cmpwi r23,63
-	beq 4f
-	li r6,0
-	tlbwe r6,r23,PPC44x_TLB_PAGEID
-	isync
-
-4:
-#ifdef CONFIG_PPC_EARLY_DEBUG_44x
-	/* Add UART mapping for early debug. */
-
-	/* pageid fields */
-	lis r3,PPC44x_EARLY_DEBUG_VIRTADDR@h
-	ori r3,r3,PPC44x_TLB_VALID|PPC44x_TLB_TS|PPC44x_TLB_64K
-
-	/* xlat fields */
-	lis r4,CONFIG_PPC_EARLY_DEBUG_44x_PHYSLOW@h
-	ori r4,r4,CONFIG_PPC_EARLY_DEBUG_44x_PHYSHIGH
-
-	/* attrib fields */
-	li r5,(PPC44x_TLB_SW|PPC44x_TLB_SR|PPC44x_TLB_I|PPC44x_TLB_G)
-	li r0,62 /* TLB slot 0 */
-
-	tlbwe r3,r0,PPC44x_TLB_PAGEID
-	tlbwe r4,r0,PPC44x_TLB_XLAT
-	tlbwe r5,r0,PPC44x_TLB_ATTRIB
-
-	/* Force context change */
-	isync
-#endif /* CONFIG_PPC_EARLY_DEBUG_44x */
-
-	/* Establish the interrupt vector offsets */
-	SET_IVOR(0, CriticalInput);
-	SET_IVOR(1, MachineCheck);
-	SET_IVOR(2, DataStorage);
-	SET_IVOR(3, InstructionStorage);
-	SET_IVOR(4, ExternalInput);
-	SET_IVOR(5, Alignment);
-	SET_IVOR(6, Program);
-	SET_IVOR(7, FloatingPointUnavailable);
-	SET_IVOR(8, SystemCall);
-	SET_IVOR(9, AuxillaryProcessorUnavailable);
-	SET_IVOR(10, Decrementer);
-	SET_IVOR(11, FixedIntervalTimer);
-	SET_IVOR(12, WatchdogTimer);
-	SET_IVOR(13, DataTLBError);
-	SET_IVOR(14, InstructionTLBError);
-	SET_IVOR(15, DebugCrit);
-
-	/* Establish the interrupt vector base */
-	lis r4,interrupt_base@h /* IVPR only uses the high 16-bits */
-	mtspr SPRN_IVPR,r4
+	bl init_cpu_state

 	/*
 	 * This is where the main kernel code starts.
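For orientation (this sketch is not part of the commit): the block removed above ends by pointing IVPR at interrupt_base, a step that reappears in head_start_common later in this patch. A Book E core builds a vector address from the high 16 bits of IVPR plus a 16-byte-aligned offset taken from the relevant IVORn, which is why the comment notes that IVPR only uses the high 16 bits. A minimal C sketch of that address formation; booke_vector() is a name invented here.

#include <stdint.h>

/* Sketch of Book E vector formation: IVPR supplies the upper 16 bits,
 * IVORn a 16-byte-aligned offset within that 64KB window; the low
 * 4 bits of the result are always zero. */
static uint32_t booke_vector(uint32_t ivpr, uint32_t ivorn)
{
	return (ivpr & 0xffff0000u) | (ivorn & 0x0000fff0u);
}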
@@ -349,7 +192,7 @@ interrupt_base:
 #endif

 	/* Data TLB Error Interrupt */
-	START_EXCEPTION(DataTLBError)
+	START_EXCEPTION(DataTLBError44x)
 	mtspr SPRN_SPRG_WSCRATCH0, r10 /* Save some working registers */
 	mtspr SPRN_SPRG_WSCRATCH1, r11
 	mtspr SPRN_SPRG_WSCRATCH2, r12
@@ -440,7 +283,7 @@ tlb_44x_patch_hwater_D:
 	mfspr r10,SPRN_DEAR

 	/* Jump to common tlb load */
-	b finish_tlb_load
+	b finish_tlb_load_44x

 2:
 	/* The bailout. Restore registers to pre-exception conditions
@@ -460,7 +303,7 @@ tlb_44x_patch_hwater_D:
 	 * information from different registers and bailout
 	 * to a different point.
 	 */
-	START_EXCEPTION(InstructionTLBError)
+	START_EXCEPTION(InstructionTLBError44x)
 	mtspr SPRN_SPRG_WSCRATCH0, r10 /* Save some working registers */
 	mtspr SPRN_SPRG_WSCRATCH1, r11
 	mtspr SPRN_SPRG_WSCRATCH2, r12
@@ -536,7 +379,7 @@ tlb_44x_patch_hwater_I:
 	mfspr r10,SPRN_SRR0

 	/* Jump to common TLB load point */
-	b finish_tlb_load
+	b finish_tlb_load_44x

 2:
 	/* The bailout. Restore registers to pre-exception conditions
@@ -550,15 +393,7 @@ tlb_44x_patch_hwater_I:
 	mfspr r10, SPRN_SPRG_RSCRATCH0
 	b InstructionStorage

-	/* Debug Interrupt */
-	DEBUG_CRIT_EXCEPTION
-
-/*
- * Local functions
- */
-
 /*
-
  * Both the instruction and data TLB miss get to this
  * point to load the TLB.
  * r10 - EA of fault
@@ -568,7 +403,7 @@ tlb_44x_patch_hwater_I:
  * MMUCR - loaded with proper value when we get here
  * Upon exit, we reload everything and RFI.
  */
-finish_tlb_load:
+finish_tlb_load_44x:
 	/* Combine RPN & ERPN an write WS 0 */
 	rlwimi r11,r12,0,0,31-PAGE_SHIFT
 	tlbwe r11,r13,PPC44x_TLB_XLAT
@@ -601,6 +436,227 @@ finish_tlb_load:
 	mfspr r10, SPRN_SPRG_RSCRATCH0
 	rfi /* Force context change */

+/* TLB error interrupts for 476
+ */
+#ifdef CONFIG_PPC_47x
+	START_EXCEPTION(DataTLBError47x)
+	mtspr SPRN_SPRG_WSCRATCH0,r10 /* Save some working registers */
+	mtspr SPRN_SPRG_WSCRATCH1,r11
+	mtspr SPRN_SPRG_WSCRATCH2,r12
+	mtspr SPRN_SPRG_WSCRATCH3,r13
+	mfcr r11
+	mtspr SPRN_SPRG_WSCRATCH4,r11
+	mfspr r10,SPRN_DEAR /* Get faulting address */
+
+	/* If we are faulting a kernel address, we have to use the
+	 * kernel page tables.
+	 */
+	lis r11,PAGE_OFFSET@h
+	cmplw cr0,r10,r11
+	blt+ 3f
+	lis r11,swapper_pg_dir@h
+	ori r11,r11, swapper_pg_dir@l
+	li r12,0 /* MMUCR = 0 */
+	b 4f
+
+	/* Get the PGD for the current thread and setup MMUCR */
+3: mfspr r11,SPRN_SPRG3
+	lwz r11,PGDIR(r11)
+	mfspr r12,SPRN_PID /* Get PID */
+4: mtspr SPRN_MMUCR,r12 /* Set MMUCR */
+
+	/* Mask of required permission bits. Note that while we
+	 * do copy ESR:ST to _PAGE_RW position as trying to write
+	 * to an RO page is pretty common, we don't do it with
+	 * _PAGE_DIRTY. We could do it, but it's a fairly rare
+	 * event so I'd rather take the overhead when it happens
+	 * rather than adding an instruction here. We should measure
+	 * whether the whole thing is worth it in the first place
+	 * as we could avoid loading SPRN_ESR completely in the first
+	 * place...
+	 *
+	 * TODO: Is it worth doing that mfspr & rlwimi in the first
+	 * place or can we save a couple of instructions here ?
+	 */
+	mfspr r12,SPRN_ESR
+	li r13,_PAGE_PRESENT|_PAGE_ACCESSED
+	rlwimi r13,r12,10,30,30
+
+	/* Load the PTE */
+	/* Compute pgdir/pmd offset */
+	rlwinm r12,r10,PPC44x_PGD_OFF_SHIFT,PPC44x_PGD_OFF_MASK_BIT,29
+	lwzx r11,r12,r11 /* Get pgd/pmd entry */
+
+	/* Word 0 is EPN,V,TS,DSIZ */
+	li r12,PPC47x_TLB0_VALID | PPC47x_TLBE_SIZE
+	rlwimi r10,r12,0,32-PAGE_SHIFT,31 /* Insert valid and page size*/
+	li r12,0
+	tlbwe r10,r12,0
+
+	/* XXX can we do better ? Need to make sure tlbwe has established
+	 * latch V bit in MMUCR0 before the PTE is loaded further down */
+#ifdef CONFIG_SMP
+	isync
+#endif
+
+	rlwinm. r12,r11,0,0,20 /* Extract pt base address */
+	/* Compute pte address */
+	rlwimi r12,r10,PPC44x_PTE_ADD_SHIFT,PPC44x_PTE_ADD_MASK_BIT,28
+	beq 2f /* Bail if no table */
+	lwz r11,0(r12) /* Get high word of pte entry */
+
+	/* XXX can we do better ? maybe insert a known 0 bit from r11 into the
+	 * bottom of r12 to create a data dependency... We can also use r10
+	 * as destination nowadays
+	 */
+#ifdef CONFIG_SMP
+	lwsync
+#endif
+	lwz r12,4(r12) /* Get low word of pte entry */
+
+	andc. r13,r13,r12 /* Check permission */
+
+	/* Jump to common tlb load */
+	beq finish_tlb_load_47x
+
+2: /* The bailout. Restore registers to pre-exception conditions
+	 * and call the heavyweights to help us out.
+	 */
+	mfspr r11,SPRN_SPRG_RSCRATCH4
+	mtcr r11
+	mfspr r13,SPRN_SPRG_RSCRATCH3
+	mfspr r12,SPRN_SPRG_RSCRATCH2
+	mfspr r11,SPRN_SPRG_RSCRATCH1
+	mfspr r10,SPRN_SPRG_RSCRATCH0
+	b DataStorage
+
+	/* Instruction TLB Error Interrupt */
+	/*
+	 * Nearly the same as above, except we get our
+	 * information from different registers and bailout
+	 * to a different point.
+	 */
+	START_EXCEPTION(InstructionTLBError47x)
+	mtspr SPRN_SPRG_WSCRATCH0,r10 /* Save some working registers */
+	mtspr SPRN_SPRG_WSCRATCH1,r11
+	mtspr SPRN_SPRG_WSCRATCH2,r12
+	mtspr SPRN_SPRG_WSCRATCH3,r13
+	mfcr r11
+	mtspr SPRN_SPRG_WSCRATCH4,r11
+	mfspr r10,SPRN_SRR0 /* Get faulting address */
+
+	/* If we are faulting a kernel address, we have to use the
+	 * kernel page tables.
+	 */
+	lis r11,PAGE_OFFSET@h
+	cmplw cr0,r10,r11
+	blt+ 3f
+	lis r11,swapper_pg_dir@h
+	ori r11,r11, swapper_pg_dir@l
+	li r12,0 /* MMUCR = 0 */
+	b 4f
+
+	/* Get the PGD for the current thread and setup MMUCR */
+3: mfspr r11,SPRN_SPRG_THREAD
+	lwz r11,PGDIR(r11)
+	mfspr r12,SPRN_PID /* Get PID */
+4: mtspr SPRN_MMUCR,r12 /* Set MMUCR */
+
+	/* Make up the required permissions */
+	li r13,_PAGE_PRESENT | _PAGE_ACCESSED | _PAGE_EXEC
+
+	/* Load PTE */
+	/* Compute pgdir/pmd offset */
+	rlwinm r12,r10,PPC44x_PGD_OFF_SHIFT,PPC44x_PGD_OFF_MASK_BIT,29
+	lwzx r11,r12,r11 /* Get pgd/pmd entry */
+
+	/* Word 0 is EPN,V,TS,DSIZ */
+	li r12,PPC47x_TLB0_VALID | PPC47x_TLBE_SIZE
+	rlwimi r10,r12,0,32-PAGE_SHIFT,31 /* Insert valid and page size*/
+	li r12,0
+	tlbwe r10,r12,0
+
+	/* XXX can we do better ? Need to make sure tlbwe has established
+	 * latch V bit in MMUCR0 before the PTE is loaded further down */
+#ifdef CONFIG_SMP
+	isync
+#endif
+
+	rlwinm. r12,r11,0,0,20 /* Extract pt base address */
+	/* Compute pte address */
+	rlwimi r12,r10,PPC44x_PTE_ADD_SHIFT,PPC44x_PTE_ADD_MASK_BIT,28
+	beq 2f /* Bail if no table */
+
+	lwz r11,0(r12) /* Get high word of pte entry */
+	/* XXX can we do better ? maybe insert a known 0 bit from r11 into the
+	 * bottom of r12 to create a data dependency... We can also use r10
+	 * as destination nowadays
+	 */
+#ifdef CONFIG_SMP
+	lwsync
+#endif
+	lwz r12,4(r12) /* Get low word of pte entry */
+
+	andc. r13,r13,r12 /* Check permission */
+
+	/* Jump to common TLB load point */
+	beq finish_tlb_load_47x
+
+2: /* The bailout. Restore registers to pre-exception conditions
+	 * and call the heavyweights to help us out.
+	 */
+	mfspr r11, SPRN_SPRG_RSCRATCH4
+	mtcr r11
+	mfspr r13, SPRN_SPRG_RSCRATCH3
+	mfspr r12, SPRN_SPRG_RSCRATCH2
+	mfspr r11, SPRN_SPRG_RSCRATCH1
+	mfspr r10, SPRN_SPRG_RSCRATCH0
+	b InstructionStorage
+
+/*
+ * Both the instruction and data TLB miss get to this
+ * point to load the TLB.
+ * r10 - free to use
+ * r11 - PTE high word value
+ * r12 - PTE low word value
+ * r13 - free to use
+ * MMUCR - loaded with proper value when we get here
+ * Upon exit, we reload everything and RFI.
+ */
+finish_tlb_load_47x:
+	/* Combine RPN & ERPN an write WS 1 */
+	rlwimi r11,r12,0,0,31-PAGE_SHIFT
+	tlbwe r11,r13,1
+
+	/* And make up word 2 */
+	li r10,0xf85 /* Mask to apply from PTE */
+	rlwimi r10,r12,29,30,30 /* DIRTY -> SW position */
+	and r11,r12,r10 /* Mask PTE bits to keep */
+	andi. r10,r12,_PAGE_USER /* User page ? */
+	beq 1f /* nope, leave U bits empty */
+	rlwimi r11,r11,3,26,28 /* yes, copy S bits to U */
+1: tlbwe r11,r13,2
+
+	/* Done...restore registers and get out of here.
+	 */
+	mfspr r11, SPRN_SPRG_RSCRATCH4
+	mtcr r11
+	mfspr r13, SPRN_SPRG_RSCRATCH3
+	mfspr r12, SPRN_SPRG_RSCRATCH2
+	mfspr r11, SPRN_SPRG_RSCRATCH1
+	mfspr r10, SPRN_SPRG_RSCRATCH0
+	rfi
+
+#endif /* CONFIG_PPC_47x */
+
+	/* Debug Interrupt */
+	/*
+	 * This statement needs to exist at the end of the IVPR
+	 * definition just in case you end up taking a debug
+	 * exception within another exception.
+	 */
+	DEBUG_CRIT_EXCEPTION
+
 /*
  * Global functions
  */
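To make the bit juggling in finish_tlb_load_47x easier to follow, here is a hedged C rendering (not kernel code) of how the two TLB words are assembled: word 0 is the faulting address's page number with the valid/size bits inserted into its low PAGE_SHIFT bits, and word 2 keeps a fixed set of PTE bits (the 0xf85 mask), folds the dirty bit three positions down into the supervisor-write slot, and, for user pages, copies the supervisor permission bits into the user positions. The function names and the page_user_bit parameter are inventions for the sketch; the real code works on the raw PTE low word in registers.

#include <stdint.h>

/* Word 0: EPN | V | TS | DSIZ, matching the
 * "rlwimi r10,r12,0,32-PAGE_SHIFT,31" insert above. */
static uint32_t tlb47x_word0(uint32_t ea, uint32_t valid_ts_size, unsigned int page_shift)
{
	uint32_t page_mask = (1u << page_shift) - 1;

	return (ea & ~page_mask) | (valid_ts_size & page_mask);
}

/* Word 2: keep the 0xf85 PTE bits, fold DIRTY into the SW slot
 * (the rotate-by-29), and mirror the S bits into the U bits for
 * user pages (the rotate-by-3 under mask 0x38). */
static uint32_t tlb47x_word2(uint32_t pte_lo, uint32_t page_user_bit)
{
	uint32_t mask = 0xf85;
	uint32_t w2;

	mask |= (pte_lo >> 3) & 0x2;		/* DIRTY -> SW position */
	w2 = pte_lo & mask;

	if (pte_lo & page_user_bit)		/* user page? */
		w2 |= (w2 & 0x7) << 3;		/* copy S bits to U */

	return w2;
}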
@@ -647,6 +703,428 @@ _GLOBAL(set_context)
 	blr

 /*
+ * Init CPU state. This is called at boot time or for secondary CPUs
+ * to setup initial TLB entries, setup IVORs, etc...
+ *
+ */
+_GLOBAL(init_cpu_state)
+	mflr r22
+#ifdef CONFIG_PPC_47x
+	/* We use the PVR to differenciate 44x cores from 476 */
+	mfspr r3,SPRN_PVR
+	srwi r3,r3,16
+	cmplwi cr0,r3,PVR_476@h
+	beq head_start_47x
+	cmplwi cr0,r3,PVR_476_ISS@h
+	beq head_start_47x
+#endif /* CONFIG_PPC_47x */
+
+	/*
+	 * In case the firmware didn't do it, we apply some workarounds
+	 * that are good for all 440 core variants here
+	 */
+	mfspr r3,SPRN_CCR0
+	rlwinm r3,r3,0,0,27 /* disable icache prefetch */
+	isync
+	mtspr SPRN_CCR0,r3
+	isync
+	sync
+
+	/*
+	 * Set up the initial MMU state for 44x
+	 *
+	 * We are still executing code at the virtual address
+	 * mappings set by the firmware for the base of RAM.
+	 *
+	 * We first invalidate all TLB entries but the one
+	 * we are running from. We then load the KERNELBASE
+	 * mappings so we can begin to use kernel addresses
+	 * natively and so the interrupt vector locations are
+	 * permanently pinned (necessary since Book E
+	 * implementations always have translation enabled).
+	 *
+	 * TODO: Use the known TLB entry we are running from to
+	 * determine which physical region we are located
+	 * in. This can be used to determine where in RAM
+	 * (on a shared CPU system) or PCI memory space
+	 * (on a DRAMless system) we are located.
+	 * For now, we assume a perfect world which means
+	 * we are located at the base of DRAM (physical 0).
+	 */
+
+	/*
+	 * Search TLB for entry that we are currently using.
+	 * Invalidate all entries but the one we are using.
+	 */
+	/* Load our current PID->MMUCR TID and MSR IS->MMUCR STS */
+	mfspr r3,SPRN_PID /* Get PID */
+	mfmsr r4 /* Get MSR */
+	andi. r4,r4,MSR_IS@l /* TS=1? */
+	beq wmmucr /* If not, leave STS=0 */
+	oris r3,r3,PPC44x_MMUCR_STS@h /* Set STS=1 */
+wmmucr: mtspr SPRN_MMUCR,r3 /* Put MMUCR */
+	sync
+
+	bl invstr /* Find our address */
+invstr: mflr r5 /* Make it accessible */
+	tlbsx r23,0,r5 /* Find entry we are in */
+	li r4,0 /* Start at TLB entry 0 */
+	li r3,0 /* Set PAGEID inval value */
+1: cmpw r23,r4 /* Is this our entry? */
+	beq skpinv /* If so, skip the inval */
+	tlbwe r3,r4,PPC44x_TLB_PAGEID /* If not, inval the entry */
+skpinv: addi r4,r4,1 /* Increment */
+	cmpwi r4,64 /* Are we done? */
+	bne 1b /* If not, repeat */
+	isync /* If so, context change */
+
+	/*
+	 * Configure and load pinned entry into TLB slot 63.
+	 */
+
+	lis r3,PAGE_OFFSET@h
+	ori r3,r3,PAGE_OFFSET@l
+
+	/* Kernel is at the base of RAM */
+	li r4, 0 /* Load the kernel physical address */
+
+	/* Load the kernel PID = 0 */
+	li r0,0
+	mtspr SPRN_PID,r0
+	sync
+
+	/* Initialize MMUCR */
+	li r5,0
+	mtspr SPRN_MMUCR,r5
+	sync
+
+	/* pageid fields */
+	clrrwi r3,r3,10 /* Mask off the effective page number */
+	ori r3,r3,PPC44x_TLB_VALID | PPC44x_TLB_256M
+
+	/* xlat fields */
+	clrrwi r4,r4,10 /* Mask off the real page number */
+	/* ERPN is 0 for first 4GB page */
+
+	/* attrib fields */
+	/* Added guarded bit to protect against speculative loads/stores */
+	li r5,0
+	ori r5,r5,(PPC44x_TLB_SW | PPC44x_TLB_SR | PPC44x_TLB_SX | PPC44x_TLB_G)
+
+	li r0,63 /* TLB slot 63 */
+
+	tlbwe r3,r0,PPC44x_TLB_PAGEID /* Load the pageid fields */
+	tlbwe r4,r0,PPC44x_TLB_XLAT /* Load the translation fields */
+	tlbwe r5,r0,PPC44x_TLB_ATTRIB /* Load the attrib/access fields */
+
+	/* Force context change */
+	mfmsr r0
+	mtspr SPRN_SRR1, r0
+	lis r0,3f@h
+	ori r0,r0,3f@l
+	mtspr SPRN_SRR0,r0
+	sync
+	rfi
+
+	/* If necessary, invalidate original entry we used */
+3: cmpwi r23,63
+	beq 4f
+	li r6,0
+	tlbwe r6,r23,PPC44x_TLB_PAGEID
+	isync
+
+4:
+#ifdef CONFIG_PPC_EARLY_DEBUG_44x
+	/* Add UART mapping for early debug. */
+
+	/* pageid fields */
+	lis r3,PPC44x_EARLY_DEBUG_VIRTADDR@h
+	ori r3,r3,PPC44x_TLB_VALID|PPC44x_TLB_TS|PPC44x_TLB_64K
+
+	/* xlat fields */
+	lis r4,CONFIG_PPC_EARLY_DEBUG_44x_PHYSLOW@h
+	ori r4,r4,CONFIG_PPC_EARLY_DEBUG_44x_PHYSHIGH
+
+	/* attrib fields */
+	li r5,(PPC44x_TLB_SW|PPC44x_TLB_SR|PPC44x_TLB_I|PPC44x_TLB_G)
+	li r0,62 /* TLB slot 0 */
+
+	tlbwe r3,r0,PPC44x_TLB_PAGEID
+	tlbwe r4,r0,PPC44x_TLB_XLAT
+	tlbwe r5,r0,PPC44x_TLB_ATTRIB
+
+	/* Force context change */
+	isync
+#endif /* CONFIG_PPC_EARLY_DEBUG_44x */
+
+	/* Establish the interrupt vector offsets */
+	SET_IVOR(0, CriticalInput);
+	SET_IVOR(1, MachineCheck);
+	SET_IVOR(2, DataStorage);
+	SET_IVOR(3, InstructionStorage);
+	SET_IVOR(4, ExternalInput);
+	SET_IVOR(5, Alignment);
+	SET_IVOR(6, Program);
+	SET_IVOR(7, FloatingPointUnavailable);
+	SET_IVOR(8, SystemCall);
+	SET_IVOR(9, AuxillaryProcessorUnavailable);
+	SET_IVOR(10, Decrementer);
+	SET_IVOR(11, FixedIntervalTimer);
+	SET_IVOR(12, WatchdogTimer);
+	SET_IVOR(13, DataTLBError44x);
+	SET_IVOR(14, InstructionTLBError44x);
+	SET_IVOR(15, DebugCrit);
+
+	b head_start_common
+
+
+#ifdef CONFIG_PPC_47x
+
+#ifdef CONFIG_SMP
+
+	/* Entry point for secondary 47x processors */
+_GLOBAL(start_secondary_47x)
+	mr r24,r3 /* CPU number */
+
+	bl init_cpu_state
+
+	/* Now we need to bolt the rest of kernel memory which
+	 * is done in C code. We must be careful because our task
+	 * struct or our stack can (and will probably) be out
+	 * of reach of the initial 256M TLB entry, so we use a
+	 * small temporary stack in .bss for that. This works
+	 * because only one CPU at a time can be in this code
+	 */
+	lis r1,temp_boot_stack@h
+	ori r1,r1,temp_boot_stack@l
+	addi r1,r1,1024-STACK_FRAME_OVERHEAD
+	li r0,0
+	stw r0,0(r1)
+	bl mmu_init_secondary
+
+	/* Now we can get our task struct and real stack pointer */
+
+	/* Get current_thread_info and current */
+	lis r1,secondary_ti@ha
+	lwz r1,secondary_ti@l(r1)
+	lwz r2,TI_TASK(r1)
+
+	/* Current stack pointer */
+	addi r1,r1,THREAD_SIZE-STACK_FRAME_OVERHEAD
+	li r0,0
+	stw r0,0(r1)
+
+	/* Kernel stack for exception entry in SPRG3 */
+	addi r4,r2,THREAD /* init task's THREAD */
+	mtspr SPRN_SPRG3,r4
+
+	b start_secondary
+
+#endif /* CONFIG_SMP */
+
+/*
+ * Set up the initial MMU state for 44x
+ *
+ * We are still executing code at the virtual address
+ * mappings set by the firmware for the base of RAM.
+ */
+
+head_start_47x:
+	/* Load our current PID->MMUCR TID and MSR IS->MMUCR STS */
+	mfspr r3,SPRN_PID /* Get PID */
+	mfmsr r4 /* Get MSR */
+	andi. r4,r4,MSR_IS@l /* TS=1? */
+	beq 1f /* If not, leave STS=0 */
+	oris r3,r3,PPC47x_MMUCR_STS@h /* Set STS=1 */
+1: mtspr SPRN_MMUCR,r3 /* Put MMUCR */
+	sync
+
+	/* Find the entry we are running from */
+	bl 1f
+1: mflr r23
+	tlbsx r23,0,r23
+	tlbre r24,r23,0
+	tlbre r25,r23,1
+	tlbre r26,r23,2
+
+	/*
+	 * Cleanup time
+	 */
+
+	/* Initialize MMUCR */
+	li r5,0
+	mtspr SPRN_MMUCR,r5
+	sync
+
+clear_all_utlb_entries:
+
+	#; Set initial values.
+
+	addis r3,0,0x8000
+	addi r4,0,0
+	addi r5,0,0
+	b clear_utlb_entry
+
+	#; Align the loop to speed things up.
+
+	.align 6
+
+clear_utlb_entry:
+
+	tlbwe r4,r3,0
+	tlbwe r5,r3,1
+	tlbwe r5,r3,2
+	addis r3,r3,0x2000
+	cmpwi r3,0
+	bne clear_utlb_entry
+	addis r3,0,0x8000
+	addis r4,r4,0x100
+	cmpwi r4,0
+	bne clear_utlb_entry
+
+	#; Restore original entry.
+
+	oris r23,r23,0x8000 /* specify the way */
+	tlbwe r24,r23,0
+	tlbwe r25,r23,1
+	tlbwe r26,r23,2
+
+	/*
+	 * Configure and load pinned entry into TLB for the kernel core
+	 */
+
+	lis r3,PAGE_OFFSET@h
+	ori r3,r3,PAGE_OFFSET@l
+
+	/* Kernel is at the base of RAM */
+	li r4, 0 /* Load the kernel physical address */
+
+	/* Load the kernel PID = 0 */
+	li r0,0
+	mtspr SPRN_PID,r0
+	sync
+
+	/* Word 0 */
+	clrrwi r3,r3,12 /* Mask off the effective page number */
+	ori r3,r3,PPC47x_TLB0_VALID | PPC47x_TLB0_256M
+
+	/* Word 1 */
+	clrrwi r4,r4,12 /* Mask off the real page number */
+	/* ERPN is 0 for first 4GB page */
+	/* Word 2 */
+	li r5,0
+	ori r5,r5,PPC47x_TLB2_S_RWX
+#ifdef CONFIG_SMP
+	ori r5,r5,PPC47x_TLB2_M
+#endif
+
+	/* We write to way 0 and bolted 0 */
+	lis r0,0x8800
+	tlbwe r3,r0,0
+	tlbwe r4,r0,1
+	tlbwe r5,r0,2
+
+	/*
+	 * Configure SSPCR, ISPCR and USPCR for now to search everything, we can fix
+	 * them up later
+	 */
+	LOAD_REG_IMMEDIATE(r3, 0x9abcdef0)
+	mtspr SPRN_SSPCR,r3
+	mtspr SPRN_USPCR,r3
+	LOAD_REG_IMMEDIATE(r3, 0x12345670)
+	mtspr SPRN_ISPCR,r3
+
+	/* Force context change */
+	mfmsr r0
+	mtspr SPRN_SRR1, r0
+	lis r0,3f@h
+	ori r0,r0,3f@l
+	mtspr SPRN_SRR0,r0
+	sync
+	rfi
+
+	/* Invalidate original entry we used */
+3:
+	rlwinm r24,r24,0,21,19 /* clear the "valid" bit */
+	tlbwe r24,r23,0
+	addi r24,0,0
+	tlbwe r24,r23,1
+	tlbwe r24,r23,2
+	isync /* Clear out the shadow TLB entries */
+
+#ifdef CONFIG_PPC_EARLY_DEBUG_44x
+	/* Add UART mapping for early debug. */
+
+	/* Word 0 */
+	lis r3,PPC44x_EARLY_DEBUG_VIRTADDR@h
+	ori r3,r3,PPC47x_TLB0_VALID | PPC47x_TLB0_TS | PPC47x_TLB0_1M
+
+	/* Word 1 */
+	lis r4,CONFIG_PPC_EARLY_DEBUG_44x_PHYSLOW@h
+	ori r4,r4,CONFIG_PPC_EARLY_DEBUG_44x_PHYSHIGH
+
+	/* Word 2 */
+	li r5,(PPC47x_TLB2_S_RW | PPC47x_TLB2_IMG)
+
+	/* Bolted in way 0, bolt slot 5, we -hope- we don't hit the same
+	 * congruence class as the kernel, we need to make sure of it at
+	 * some point
+	 */
+	lis r0,0x8d00
+	tlbwe r3,r0,0
+	tlbwe r4,r0,1
+	tlbwe r5,r0,2
+
+	/* Force context change */
+	isync
+#endif /* CONFIG_PPC_EARLY_DEBUG_44x */
+
+	/* Establish the interrupt vector offsets */
+	SET_IVOR(0, CriticalInput);
+	SET_IVOR(1, MachineCheckA);
+	SET_IVOR(2, DataStorage);
+	SET_IVOR(3, InstructionStorage);
+	SET_IVOR(4, ExternalInput);
+	SET_IVOR(5, Alignment);
+	SET_IVOR(6, Program);
+	SET_IVOR(7, FloatingPointUnavailable);
+	SET_IVOR(8, SystemCall);
+	SET_IVOR(9, AuxillaryProcessorUnavailable);
+	SET_IVOR(10, Decrementer);
+	SET_IVOR(11, FixedIntervalTimer);
+	SET_IVOR(12, WatchdogTimer);
+	SET_IVOR(13, DataTLBError47x);
+	SET_IVOR(14, InstructionTLBError47x);
+	SET_IVOR(15, DebugCrit);
+
+	/* We configure icbi to invalidate 128 bytes at a time since the
+	 * current 32-bit kernel code isn't too happy with icache != dcache
+	 * block size
+	 */
+	mfspr r3,SPRN_CCR0
+	oris r3,r3,0x0020
+	mtspr SPRN_CCR0,r3
+	isync
+
+#endif /* CONFIG_PPC_47x */
+
+/*
+ * Here we are back to code that is common between 44x and 47x
+ *
+ * We proceed to further kernel initialization and return to the
+ * main kernel entry
+ */
+head_start_common:
+	/* Establish the interrupt vector base */
+	lis r4,interrupt_base@h /* IVPR only uses the high 16-bits */
+	mtspr SPRN_IVPR,r4
+
+	addis r22,r22,KERNELBASE@h
+	mtlr r22
+	isync
+	blr
+
+/*
  * We put a few things here that have to be page-aligned. This stuff
  * goes at the beginning of the data segment, which is page-aligned.
  */
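The new init_cpu_state starts by comparing the upper halfword of the PVR against the two 476 identifiers and branches to head_start_47x on a match, otherwise it falls through to the 440 path. The same test written as a hedged C sketch; pvr_is_476() is a name made up here, and the reference values would be the kernel's PVR_476 and PVR_476_ISS constants.

#include <stdbool.h>
#include <stdint.h>

/* Mirrors the mfspr/srwi/cmplwi sequence at the top of init_cpu_state:
 * only the upper 16 bits of the PVR take part in the comparison. */
static bool pvr_is_476(uint32_t pvr, uint32_t pvr_476, uint32_t pvr_476_iss)
{
	uint32_t hi = pvr >> 16;

	return hi == (pvr_476 >> 16) || hi == (pvr_476_iss >> 16);
}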
@@ -671,3 +1149,9 @@ swapper_pg_dir:
  */
 abatron_pteptrs:
 	.space 8
+
+#ifdef CONFIG_SMP
+	.align 12
+temp_boot_stack:
+	.space 1024
+#endif /* CONFIG_SMP */
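The final hunk reserves the temporary stack that start_secondary_47x runs on before the real kernel stack is reachable. Roughly the same reservation expressed in C, as a sketch only (.align 12 is a power-of-two directive, i.e. 4096-byte alignment):

/* Sketch: a 1KB scratch stack, page aligned, used by one secondary
 * CPU at a time during bring-up. */
static unsigned char temp_boot_stack[1024] __attribute__((aligned(4096)));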