diff options
 arch/powerpc/kernel/setup_64.c   |  8 ++++++++
 arch/powerpc/mm/fsl_booke_mmu.c  |  4 ++--
 arch/powerpc/mm/mmu_decl.h       |  1 +
 arch/powerpc/mm/tlb_nohash.c     | 19 ++++++++++++++++++-
 arch/powerpc/mm/tlb_nohash_low.S | 63 +++++++++++++++++++++++++++++++++++++++
 5 files changed, 92 insertions(+), 3 deletions(-)
diff --git a/arch/powerpc/kernel/setup_64.c b/arch/powerpc/kernel/setup_64.c
index bdcbb716f4d6..505ec2c698e0 100644
--- a/arch/powerpc/kernel/setup_64.c
+++ b/arch/powerpc/kernel/setup_64.c
@@ -108,6 +108,14 @@ static void setup_tlb_core_data(void)
 	for_each_possible_cpu(cpu) {
 		int first = cpu_first_thread_sibling(cpu);
 
+		/*
+		 * If we boot via kdump on a non-primary thread,
+		 * make sure we point at the thread that actually
+		 * set up this TLB.
+		 */
+		if (cpu_first_thread_sibling(boot_cpuid) == first)
+			first = boot_cpuid;
+
 		paca[cpu].tcd_ptr = &paca[first].tcd;
 
 		/*
diff --git a/arch/powerpc/mm/fsl_booke_mmu.c b/arch/powerpc/mm/fsl_booke_mmu.c
index 354ba3c09ef3..bb1f88c10377 100644
--- a/arch/powerpc/mm/fsl_booke_mmu.c
+++ b/arch/powerpc/mm/fsl_booke_mmu.c
@@ -141,8 +141,6 @@ static void settlbcam(int index, unsigned long virt, phys_addr_t phys,
 	tlbcam_addrs[index].start = virt;
 	tlbcam_addrs[index].limit = virt + size - 1;
 	tlbcam_addrs[index].phys = phys;
-
-	loadcam_entry(index);
 }
 
 unsigned long calc_cam_sz(unsigned long ram, unsigned long virt,
@@ -188,6 +186,8 @@ static unsigned long map_mem_in_cams_addr(phys_addr_t phys, unsigned long virt,
 		virt += cam_sz;
 		phys += cam_sz;
 	}
+
+	loadcam_multi(0, i, max_cam_idx);
 	tlbcam_index = i;
 
 #ifdef CONFIG_PPC64
diff --git a/arch/powerpc/mm/mmu_decl.h b/arch/powerpc/mm/mmu_decl.h
index 085b66b10891..27c3a2d3a4f1 100644
--- a/arch/powerpc/mm/mmu_decl.h
+++ b/arch/powerpc/mm/mmu_decl.h
@@ -152,6 +152,7 @@ extern int switch_to_as1(void);
 extern void restore_to_as0(int esel, int offset, void *dt_ptr, int bootcpu);
 #endif
 extern void loadcam_entry(unsigned int index);
+extern void loadcam_multi(int first_idx, int num, int tmp_idx);
 
 struct tlbcam {
 	u32 MAS0;
diff --git a/arch/powerpc/mm/tlb_nohash.c b/arch/powerpc/mm/tlb_nohash.c
index 723a099f6be3..a7381fbdd6ab 100644
--- a/arch/powerpc/mm/tlb_nohash.c
+++ b/arch/powerpc/mm/tlb_nohash.c
@@ -42,6 +42,7 @@
 #include <asm/tlbflush.h>
 #include <asm/tlb.h>
 #include <asm/code-patching.h>
+#include <asm/cputhreads.h>
 #include <asm/hugetlb.h>
 #include <asm/paca.h>
 
@@ -628,10 +629,26 @@ static void early_init_this_mmu(void)
 #ifdef CONFIG_PPC_FSL_BOOK3E
 	if (mmu_has_feature(MMU_FTR_TYPE_FSL_E)) {
 		unsigned int num_cams;
+		int __maybe_unused cpu = smp_processor_id();
+		bool map = true;
 
 		/* use a quarter of the TLBCAM for bolted linear map */
 		num_cams = (mfspr(SPRN_TLB1CFG) & TLBnCFG_N_ENTRY) / 4;
-		linear_map_top = map_mem_in_cams(linear_map_top, num_cams);
+
+		/*
+		 * Only do the mapping once per core, or else the
+		 * transient mapping would cause problems.
+		 */
+#ifdef CONFIG_SMP
+		if (cpu != boot_cpuid &&
+		    (cpu != cpu_first_thread_sibling(cpu) ||
+		     cpu == cpu_first_thread_sibling(boot_cpuid)))
+			map = false;
+#endif
+
+		if (map)
+			linear_map_top = map_mem_in_cams(linear_map_top,
+							 num_cams);
 	}
 #endif
 
diff --git a/arch/powerpc/mm/tlb_nohash_low.S b/arch/powerpc/mm/tlb_nohash_low.S
index 43ff3c797fbf..68c477592e43 100644
--- a/arch/powerpc/mm/tlb_nohash_low.S
+++ b/arch/powerpc/mm/tlb_nohash_low.S
@@ -400,6 +400,7 @@ _GLOBAL(set_context)
  * extern void loadcam_entry(unsigned int index)
  *
  * Load TLBCAM[index] entry in to the L2 CAM MMU
+ * Must preserve r7, r8, r9, and r10
  */
 _GLOBAL(loadcam_entry)
 	mflr	r5
@@ -423,4 +424,66 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
 	tlbwe
 	isync
 	blr
+
+/*
+ * Load multiple TLB entries at once, using an alternate-space
+ * trampoline so that we don't have to care about whether the same
+ * TLB entry maps us before and after.
+ *
+ * r3 = first entry to write
+ * r4 = number of entries to write
+ * r5 = temporary tlb entry
+ */
+_GLOBAL(loadcam_multi)
+	mflr	r8
+
+	/*
+	 * Set up temporary TLB entry that is the same as what we're
+	 * running from, but in AS=1.
+	 */
+	bl	1f
+1:	mflr	r6
+	tlbsx	0,r8
+	mfspr	r6,SPRN_MAS1
+	ori	r6,r6,MAS1_TS
+	mtspr	SPRN_MAS1,r6
+	mfspr	r6,SPRN_MAS0
+	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
+	mr	r7,r5
+	mtspr	SPRN_MAS0,r6
+	isync
+	tlbwe
+	isync
+
+	/* Switch to AS=1 */
+	mfmsr	r6
+	ori	r6,r6,MSR_IS|MSR_DS
+	mtmsr	r6
+	isync
+
+	mr	r9,r3
+	add	r10,r3,r4
+2:	bl	loadcam_entry
+	addi	r9,r9,1
+	cmpw	r9,r10
+	mr	r3,r9
+	blt	2b
+
+	/* Return to AS=0 and clear the temporary entry */
+	mfmsr	r6
+	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
+	mtmsr	r6
+	isync
+
+	li	r6,0
+	mtspr	SPRN_MAS1,r6
+	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
+	oris	r6,r6,MAS0_TLBSEL(1)@h
+	mtspr	SPRN_MAS0,r6
+	isync
+	tlbwe
+	isync
+
+	mtlr	r8
+	blr
 #endif