diff options
author | Benjamin Herrenschmidt <benh@kernel.crashing.org> | 2008-12-18 14:13:42 -0500 |
---|---|---|
committer | Paul Mackerras <paulus@samba.org> | 2008-12-20 22:21:16 -0500 |
commit | 2a4aca1144394653269720ffbb5a325a77abd5fa (patch) | |
tree | 553bbcbb294ac5923f72430b7317b5c80a27141c /arch/powerpc/mm/hash_low_32.S | |
parent | f048aace29e007f2b642097e2da8231e0e9cce2d (diff) |
powerpc/mm: Split low level tlb invalidate for nohash processors
Currently, the various forms of low level TLB invalidations are all
implemented in misc_32.S for 32-bit processors, in a fairly scary
mess of #ifdef's and with interesting duplication such as a whole
bunch of code for FSL _tlbie and _tlbia which are no longer used.
This moves things around such that _tlbie is now defined in
hash_low_32.S and is only used by the 32-bit hash code, and all
nohash CPUs use the various _tlbil_* forms that are now moved to
a new file, tlb_nohash_low.S.
I moved all the definitions for that stuff out of
include/asm/tlbflush.h as they are really internal mm stuff, into
mm/mmu_decl.h
The code should have no functional changes. I kept some variants
inline for trivial forms on things like 40x and 8xx.
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Acked-by: Kumar Gala <galak@kernel.crashing.org>
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'arch/powerpc/mm/hash_low_32.S')
-rw-r--r-- | arch/powerpc/mm/hash_low_32.S | 76 |
1 file changed, 76 insertions, 0 deletions
diff --git a/arch/powerpc/mm/hash_low_32.S b/arch/powerpc/mm/hash_low_32.S index c5536b8b37a9..c8eac22a8f00 100644 --- a/arch/powerpc/mm/hash_low_32.S +++ b/arch/powerpc/mm/hash_low_32.S | |||
@@ -633,3 +633,79 @@ _GLOBAL(flush_hash_patch_B) | |||
633 | SYNC_601 | 633 | SYNC_601 |
634 | isync | 634 | isync |
635 | blr | 635 | blr |
636 | |||
637 | /* | ||
638 | * Flush an entry from the TLB | ||
639 | */ | ||
640 | _GLOBAL(_tlbie) | ||
641 | #ifdef CONFIG_SMP | ||
642 | rlwinm r8,r1,0,0,(31-THREAD_SHIFT) /* mask stack ptr (r1) down to thread_info base */ | ||
643 | lwz r8,TI_CPU(r8) /* r8 = this CPU's number */ | ||
644 | oris r8,r8,11 /* lock token = cpu# | 0x000b0000 (stored into lock word below) */ | ||
645 | mfmsr r10 /* save MSR; restored before returning */ | ||
646 | SYNC | ||
647 | rlwinm r0,r10,0,17,15 /* clear bit 16 (MSR_EE) */ | ||
648 | rlwinm r0,r0,0,28,26 /* clear DR */ | ||
649 | mtmsr r0 /* interrupts off, data translation off */ | ||
650 | SYNC_601 | ||
651 | isync | ||
652 | lis r9,mmu_hash_lock@h | ||
653 | ori r9,r9,mmu_hash_lock@l | ||
654 | tophys(r9,r9) /* physical address of lock, since DR is now off */ | ||
655 | 10: lwarx r7,0,r9 /* spin until mmu_hash_lock reads as 0 (free)... */ | ||
656 | cmpwi 0,r7,0 | ||
657 | bne- 10b | ||
658 | stwcx. r8,0,r9 /* ...then try to claim it with our token */ | ||
659 | bne- 10b /* reservation lost -- retry from the top */ | ||
660 | eieio | ||
661 | tlbie r3 /* invalidate the TLB entry for the EA in r3 */ | ||
662 | sync | ||
663 | TLBSYNC /* wait for the broadcast invalidate to complete on other CPUs */ | ||
664 | li r0,0 | ||
665 | stw r0,0(r9) /* clear mmu_hash_lock */ | ||
666 | mtmsr r10 /* restore saved MSR (re-enables EE/DR as they were) */ | ||
667 | SYNC_601 | ||
668 | isync | ||
669 | #else /* CONFIG_SMP */ | ||
670 | tlbie r3 /* UP: no lock or broadcast sync needed */ | ||
671 | sync | ||
672 | #endif /* CONFIG_SMP */ | ||
673 | blr | ||
674 | |||
675 | /* | ||
676 | * Flush the entire TLB. 603/603e only | ||
677 | */ | ||
678 | _GLOBAL(_tlbia) | ||
679 | #if defined(CONFIG_SMP) | ||
680 | rlwinm r8,r1,0,0,(31-THREAD_SHIFT) | ||
681 | lwz r8,TI_CPU(r8) | ||
682 | oris r8,r8,10 | ||
683 | mfmsr r10 | ||
684 | SYNC | ||
685 | rlwinm r0,r10,0,17,15 /* clear bit 16 (MSR_EE) */ | ||
686 | rlwinm r0,r0,0,28,26 /* clear DR */ | ||
687 | mtmsr r0 | ||
688 | SYNC_601 | ||
689 | isync | ||
690 | lis r9,mmu_hash_lock@h | ||
691 | ori r9,r9,mmu_hash_lock@l | ||
692 | tophys(r9,r9) | ||
693 | 10: lwarx r7,0,r9 | ||
694 | cmpwi 0,r7,0 | ||
695 | bne- 10b | ||
696 | stwcx. r8,0,r9 | ||
697 | bne- 10b | ||
698 | sync | ||
699 | tlbia | ||
700 | sync | ||
701 | TLBSYNC | ||
702 | li r0,0 | ||
703 | stw r0,0(r9) /* clear mmu_hash_lock */ | ||
704 | mtmsr r10 | ||
705 | SYNC_601 | ||
706 | isync | ||
707 | #else /* CONFIG_SMP */ | ||
708 | sync | ||
709 | tlbia | ||
710 | sync | ||
711 | #endif /* CONFIG_SMP */ | ||