Diffstat (limited to 'arch/powerpc/kernel/head_fsl_booke.S')
-rw-r--r--  arch/powerpc/kernel/head_fsl_booke.S | 175
 1 file changed, 140 insertions(+), 35 deletions(-)
diff --git a/arch/powerpc/kernel/head_fsl_booke.S b/arch/powerpc/kernel/head_fsl_booke.S
index 50845924b7d9..9f5d210ddf3f 100644
--- a/arch/powerpc/kernel/head_fsl_booke.S
+++ b/arch/powerpc/kernel/head_fsl_booke.S
@@ -63,17 +63,30 @@ _ENTRY(_start);
  * of abatron_pteptrs
  */
 	nop
-/*
- * Save parameters we are passed
- */
-	mr	r31,r3
-	mr	r30,r4
-	mr	r29,r5
-	mr	r28,r6
-	mr	r27,r7
-	li	r25,0			/* phys kernel start (low) */
-	li	r24,0			/* CPU number */
-	li	r23,0			/* phys kernel start (high) */
+
+	/* Translate device tree address to physical, save in r30/r31 */
+	mfmsr	r16
+	mfspr	r17,SPRN_PID
+	rlwinm	r17,r17,16,0x3fff0000	/* turn PID into MAS6[SPID] */
+	rlwimi	r17,r16,28,0x00000001	/* turn MSR[DS] into MAS6[SAS] */
+	mtspr	SPRN_MAS6,r17
+
+	tlbsx	0,r3			/* must succeed */
+
+	mfspr	r16,SPRN_MAS1
+	mfspr	r20,SPRN_MAS3
+	rlwinm	r17,r16,25,0x1f		/* r17 = log2(page size) */
+	li	r18,1024
+	slw	r18,r18,r17		/* r18 = page size */
+	addi	r18,r18,-1
+	and	r19,r3,r18		/* r19 = page offset */
+	andc	r31,r20,r18		/* r31 = page base */
+	or	r31,r31,r19		/* r31 = devtree phys addr */
+	mfspr	r30,SPRN_MAS7
+
+	li	r25,0			/* phys kernel start (low) */
+	li	r24,0			/* CPU number */
+	li	r23,0			/* phys kernel start (high) */
 
 /* We try to not make any assumptions about how the boot loader
  * setup or used the TLBs.  We invalidate all mappings from the
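Note: the hunk above drops the old "save boot parameters" block; the kernel keeps only the device-tree pointer and immediately converts it from an effective to a physical address by searching the TLB entry that currently maps it (tlbsx). A rough C model of the arithmetic that follows the search, with the post-tlbsx SPR values passed in as plain parameters (an illustrative sketch, not a kernel interface):

#include <stdint.h>

/*
 * Hypothetical C rendering of the new boot-time sequence above.  mas1, mas3
 * and mas7 stand for the values the hardware returns after a successful
 * tlbsx; C cannot issue the search itself, so they are parameters here.
 */
static uint64_t devtree_ea_to_phys(uint32_t ea, uint32_t mas1,
				   uint32_t mas3, uint32_t mas7)
{
	uint32_t tsize  = (mas1 >> 7) & 0x1f;	/* MAS1[TSIZE], as the rlwinm extracts it */
	uint32_t psize  = 1024u << tsize;	/* page size, computed as in the asm */
	uint32_t offset = ea & (psize - 1);	/* offset within the page */
	uint32_t base   = mas3 & ~(psize - 1);	/* physical page base (low 32 bits) */

	return ((uint64_t)mas7 << 32) | (base | offset);
}

MAS7 carries the physical-address bits above 32 on parts with more than 32 bits of physical address space, which is why the result is returned in the r30/r31 pair.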
@@ -198,11 +211,8 @@ _ENTRY(__early_start)
 /*
  * Decide what sort of machine this is and initialize the MMU.
  */
-	mr	r3,r31
-	mr	r4,r30
-	mr	r5,r29
-	mr	r6,r28
-	mr	r7,r27
+	mr	r3,r30
+	mr	r4,r31
 	bl	machine_init
 	bl	MMU_init
 
@@ -236,8 +246,24 @@ _ENTRY(__early_start)
  * if we find the pte (fall through):
  *   r11 is low pte word
  *   r12 is pointer to the pte
+ *   r10 is the pshift from the PGD, if we're a hugepage
  */
 #ifdef CONFIG_PTE_64BIT
+#ifdef CONFIG_HUGETLB_PAGE
+#define FIND_PTE	\
+	rlwinm	r12, r10, 13, 19, 29;	/* Compute pgdir/pmd offset */	\
+	lwzx	r11, r12, r11;		/* Get pgd/pmd entry */		\
+	rlwinm.	r12, r11, 0, 0, 20;	/* Extract pt base address */	\
+	blt	1000f;			/* Normal non-huge page */	\
+	beq	2f;			/* Bail if no table */		\
+	oris	r11, r11, PD_HUGE@h;	/* Put back address bit */	\
+	andi.	r10, r11, HUGEPD_SHIFT_MASK@l; /* extract size field */	\
+	xor	r12, r10, r11;		/* drop size bits from pointer */ \
+	b	1001f;							\
+1000:	rlwimi	r12, r10, 23, 20, 28;	/* Compute pte address */	\
+	li	r10, 0;			/* clear r10 */			\
+1001:	lwz	r11, 4(r12);		/* Get pte entry */
+#else
 #define FIND_PTE	\
 	rlwinm	r12, r10, 13, 19, 29;	/* Compute pgdir/pmd offset */ \
 	lwzx	r11, r12, r11;		/* Get pgd/pmd entry */ \
@@ -245,7 +271,8 @@ _ENTRY(__early_start)
 	beq	2f;			/* Bail if no table */ \
 	rlwimi	r12, r10, 23, 20, 28;	/* Compute pte address */ \
 	lwz	r11, 4(r12);		/* Get pte entry */
-#else
+#endif /* HUGEPAGE */
+#else /* !PTE_64BIT */
 #define FIND_PTE	\
 	rlwimi	r11, r10, 12, 20, 29;	/* Create L1 (pgdir/pmd) address */ \
 	lwz	r11, 0(r11);		/* Get L1 entry */ \
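For orientation, here is a rough C rendering of the three-way test the hugepage-aware FIND_PTE above performs on a PGD/PMD entry. The helper name and enum are illustrative; only PD_HUGE and HUGEPD_SHIFT_MASK correspond to macros the assembly relies on.

#include <stdint.h>

#define PD_HUGE            0x80000000u	/* top address bit; cleared to mark a hugepd */
#define HUGEPD_SHIFT_MASK  0x3fu	/* low bits of a hugepd carry the page shift */

enum pgd_kind { PGD_EMPTY, PGD_PTE_TABLE, PGD_HUGEPD };

/*
 * Page tables live in kernel lowmem, so a real page-table pointer always has
 * its top bit set; a hugepd has that bit cleared and stores the huge page
 * shift in its low bits.  Empty entries fall through to the fault path.
 */
static enum pgd_kind classify_pgd_entry(uint32_t entry, uint32_t *pshift)
{
	if ((entry & 0xfffff800u) == 0)		/* "beq 2f": no table, take the fault */
		return PGD_EMPTY;

	if (entry & PD_HUGE) {			/* "blt 1000f": ordinary 4k PTE page */
		*pshift = 0;
		return PGD_PTE_TABLE;
	}

	*pshift = entry & HUGEPD_SHIFT_MASK;	/* "andi. r10, r11, HUGEPD_SHIFT_MASK@l" */
	return PGD_HUGEPD;
}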
@@ -402,8 +429,8 @@ interrupt_base:
 
 #ifdef CONFIG_PTE_64BIT
 #ifdef CONFIG_SMP
-	subf	r10,r11,r12		/* create false data dep */
-	lwzx	r13,r11,r10		/* Get upper pte bits */
+	subf	r13,r11,r12		/* create false data dep */
+	lwzx	r13,r11,r13		/* Get upper pte bits */
 #else
 	lwz	r13,0(r12)		/* Get upper pte bits */
 #endif
@@ -483,8 +510,8 @@ interrupt_base:
 
 #ifdef CONFIG_PTE_64BIT
 #ifdef CONFIG_SMP
-	subf	r10,r11,r12		/* create false data dep */
-	lwzx	r13,r11,r10		/* Get upper pte bits */
+	subf	r13,r11,r12		/* create false data dep */
+	lwzx	r13,r11,r13		/* Get upper pte bits */
 #else
 	lwz	r13,0(r12)		/* Get upper pte bits */
 #endif
@@ -548,7 +575,7 @@ interrupt_base:
 /*
  * Both the instruction and data TLB miss get to this
  * point to load the TLB.
- * 	r10 - available to use
+ * 	r10 - tsize encoding (if HUGETLB_PAGE) or available to use
  * 	r11 - TLB (info from Linux PTE)
  * 	r12 - available to use
  * 	r13 - upper bits of PTE (if PTE_64BIT) or available to use
@@ -558,21 +585,73 @@ interrupt_base:
  * Upon exit, we reload everything and RFI.
  */
 finish_tlb_load:
+#ifdef CONFIG_HUGETLB_PAGE
+	cmpwi	6, r10, 0			/* check for huge page */
+	beq	6, finish_tlb_load_cont		/* !huge */
+
+	/* Alas, we need more scratch registers for hugepages */
+	mfspr	r12, SPRN_SPRG_THREAD
+	stw	r14, THREAD_NORMSAVE(4)(r12)
+	stw	r15, THREAD_NORMSAVE(5)(r12)
+	stw	r16, THREAD_NORMSAVE(6)(r12)
+	stw	r17, THREAD_NORMSAVE(7)(r12)
+
+	/* Get the next_tlbcam_idx percpu var */
+#ifdef CONFIG_SMP
+	lwz	r12, THREAD_INFO-THREAD(r12)
+	lwz	r15, TI_CPU(r12)
+	lis	r14, __per_cpu_offset@h
+	ori	r14, r14, __per_cpu_offset@l
+	rlwinm	r15, r15, 2, 0, 29
+	lwzx	r16, r14, r15
+#else
+	li	r16, 0
+#endif
+	lis	r17, next_tlbcam_idx@h
+	ori	r17, r17, next_tlbcam_idx@l
+	add	r17, r17, r16			/* r17 = *next_tlbcam_idx */
+	lwz	r15, 0(r17)			/* r15 = next_tlbcam_idx */
+
+	lis	r14, MAS0_TLBSEL(1)@h		/* select TLB1 (TLBCAM) */
+	rlwimi	r14, r15, 16, 4, 15		/* next_tlbcam_idx entry */
+	mtspr	SPRN_MAS0, r14
+
+	/* Extract TLB1CFG(NENTRY) */
+	mfspr	r16, SPRN_TLB1CFG
+	andi.	r16, r16, 0xfff
+
+	/* Update next_tlbcam_idx, wrapping when necessary */
+	addi	r15, r15, 1
+	cmpw	r15, r16
+	blt	100f
+	lis	r14, tlbcam_index@h
+	ori	r14, r14, tlbcam_index@l
+	lwz	r15, 0(r14)
+100:	stw	r15, 0(r17)
+
+	/*
+	 * Calc MAS1_TSIZE from r10 (which has pshift encoded)
+	 * tlb_enc = (pshift - 10).
+	 */
+	subi	r15, r10, 10
+	mfspr	r16, SPRN_MAS1
+	rlwimi	r16, r15, 7, 20, 24
+	mtspr	SPRN_MAS1, r16
+
+	/* copy the pshift for use later */
+	mr	r14, r10
+
+	/* fall through */
+
+#endif /* CONFIG_HUGETLB_PAGE */
+
 	/*
 	 * We set execute, because we don't have the granularity to
 	 * properly set this at the page level (Linux problem).
 	 * Many of these bits are software only.  Bits we don't set
 	 * here we (properly should) assume have the appropriate value.
 	 */
-
-	mfspr	r12, SPRN_MAS2
-#ifdef CONFIG_PTE_64BIT
-	rlwimi	r12, r11, 32-19, 27, 31	/* extract WIMGE from pte */
-#else
-	rlwimi	r12, r11, 26, 27, 31	/* extract WIMGE from pte */
-#endif
-	mtspr	SPRN_MAS2, r12
-
+finish_tlb_load_cont:
 #ifdef CONFIG_PTE_64BIT
 	rlwinm	r12, r11, 32-2, 26, 31	/* Move in perm bits */
 	andi.	r10, r11, _PAGE_DIRTY
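A rough C model of the new hugepage path in finish_tlb_load above (function names are illustrative, not kernel interfaces): the handler picks a victim TLB1/TLBCAM entry with a per-CPU round-robin cursor that wraps back to tlbcam_index once it reaches TLB1CFG[NENTRY], and it derives MAS1[TSIZE] from the huge page shift as tlb_enc = (pshift - 10).

#include <stdint.h>

/* Illustrative sketch; the names mirror the assembly, not the kernel's C. */
static unsigned int pick_tlb1_victim(unsigned int *next_tlbcam_idx,
				     unsigned int tlbcam_index,
				     unsigned int tlb1_nentries)
{
	unsigned int victim = *next_tlbcam_idx;		/* entry written by tlbwe */

	if (victim + 1 < tlb1_nentries)
		*next_tlbcam_idx = victim + 1;		/* advance the cursor */
	else
		*next_tlbcam_idx = tlbcam_index;	/* wrap past the boot CAM entries */

	return victim;
}

static uint32_t mas1_set_tsize(uint32_t mas1, unsigned int pshift)
{
	uint32_t tsize = pshift - 10;			/* "tlb_enc = (pshift - 10)" */

	return (mas1 & ~0x00000f80u) | (tsize << 7);	/* insert into MAS1[TSIZE] */
}

With this encoding, for example, a 4 MB huge page (pshift = 22) would program TSIZE = 12.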
@@ -581,22 +660,40 @@ finish_tlb_load:
 	andc	r12, r12, r10
 1:	rlwimi	r12, r13, 20, 0, 11	/* grab RPN[32:43] */
 	rlwimi	r12, r11, 20, 12, 19	/* grab RPN[44:51] */
-	mtspr	SPRN_MAS3, r12
+2:	mtspr	SPRN_MAS3, r12
 BEGIN_MMU_FTR_SECTION
 	srwi	r10, r13, 12		/* grab RPN[12:31] */
 	mtspr	SPRN_MAS7, r10
 END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
 #else
 	li	r10, (_PAGE_EXEC | _PAGE_PRESENT)
+	mr	r13, r11
 	rlwimi	r10, r11, 31, 29, 29	/* extract _PAGE_DIRTY into SW */
 	and	r12, r11, r10
 	andi.	r10, r11, _PAGE_USER	/* Test for _PAGE_USER */
 	slwi	r10, r12, 1
 	or	r10, r10, r12
 	iseleq	r12, r12, r10
-	rlwimi	r11, r12, 0, 20, 31	/* Extract RPN from PTE and merge with perms */
-	mtspr	SPRN_MAS3, r11
+	rlwimi	r13, r12, 0, 20, 31	/* Get RPN from PTE, merge w/ perms */
+	mtspr	SPRN_MAS3, r13
 #endif
+
+	mfspr	r12, SPRN_MAS2
+#ifdef CONFIG_PTE_64BIT
+	rlwimi	r12, r11, 32-19, 27, 31	/* extract WIMGE from pte */
+#else
+	rlwimi	r12, r11, 26, 27, 31	/* extract WIMGE from pte */
+#endif
+#ifdef CONFIG_HUGETLB_PAGE
+	beq	6, 3f			/* don't mask if page isn't huge */
+	li	r13, 1
+	slw	r13, r13, r14
+	subi	r13, r13, 1
+	rlwinm	r13, r13, 0, 0, 19	/* bottom bits used for WIMGE/etc */
+	andc	r12, r12, r13		/* mask off ea bits within the page */
+#endif
+3:	mtspr	SPRN_MAS2, r12
+
 #ifdef CONFIG_E200
 	/* Round robin TLB1 entries assignment */
 	mfspr	r12, SPRN_MAS0
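The MAS2 fix-up in the hunk above aligns the effective page number to the huge page by clearing the in-page address bits, while preserving the WIMGE and other flags kept in the low bits of MAS2. A minimal sketch of that masking (the function name is illustrative):

#include <stdint.h>

/* Sketch of the huge-page EPN alignment applied to MAS2 above. */
static uint32_t mas2_align_epn(uint32_t mas2, unsigned int pshift)
{
	uint32_t mask = (1u << pshift) - 1;	/* bytes spanned by the huge page */

	mask &= ~0xfffu;			/* bottom bits of MAS2 hold WIMGE etc. */
	return mas2 & ~mask;			/* clear EA bits within the page */
}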
@@ -622,11 +719,19 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
 	mtspr	SPRN_MAS0,r12
 #endif /* CONFIG_E200 */
 
+tlb_write_entry:
 	tlbwe
 
 	/* Done...restore registers and get out of here.  */
 	mfspr	r10, SPRN_SPRG_THREAD
-	lwz	r11, THREAD_NORMSAVE(3)(r10)
+#ifdef CONFIG_HUGETLB_PAGE
+	beq	6, 8f			/* skip restore for 4k page faults */
+	lwz	r14, THREAD_NORMSAVE(4)(r10)
+	lwz	r15, THREAD_NORMSAVE(5)(r10)
+	lwz	r16, THREAD_NORMSAVE(6)(r10)
+	lwz	r17, THREAD_NORMSAVE(7)(r10)
+#endif
+8:	lwz	r11, THREAD_NORMSAVE(3)(r10)
 	mtcr	r11
 	lwz	r13, THREAD_NORMSAVE(2)(r10)
 	lwz	r12, THREAD_NORMSAVE(1)(r10)