author	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2008-12-18 14:13:46 -0500
committer	Paul Mackerras <paulus@samba.org>	2008-12-20 22:21:16 -0500
commit	760ec0e02d8a13d0ed60d99f47879d4aa8ef1910
tree	180754d608bad7ba08d9a2f24a4ef930f564a391 /arch/powerpc
parent	2a4aca1144394653269720ffbb5a325a77abd5fa
powerpc/44x: No need to mask MSR:CE, ME or DE in _tlbil_va on 440
The handlers for Critical, Machine Check or Debug interrupts will save and
restore MMUCR nowadays, thus we only need to disable normal interrupts
when invalidating TLB entries.

Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Acked-by: Kumar Gala <galak@kernel.crashing.org>
Acked-by: Josh Boyer <jwboyer@linux.vnet.ibm.com>
Signed-off-by: Paul Mackerras <paulus@samba.org>
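For readers unfamiliar with the Book-E interrupt-enable instructions, here is a
commented sketch of the resulting _tlbil_va search window; the comments are added
here for illustration only and are not part of the patch. wrteei/wrtee write only
MSR[EE], so critical (CE), machine check (ME) and debug (DE) interrupts stay
enabled across the window:

	mfmsr	r4		/* save current MSR so EE can be restored later */
	wrteei	0		/* clear MSR[EE] only: mask normal external interrupts */
	mtspr	SPRN_MMUCR,r5	/* load the search context (TID) into MMUCR */
	tlbsx.	r3, 0, r3	/* search the TLB for the EA in r3; CR0 records hit/miss */
	wrtee	r4		/* restore MSR[EE] from the saved MSR value */

Critical, machine check and debug handlers remain free to fire inside this window;
per the commit message they save and restore MMUCR themselves, so the tlbsx result
is not corrupted.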
Diffstat (limited to 'arch/powerpc')
-rw-r--r--	arch/powerpc/mm/tlb_nohash_low.S	19
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/arch/powerpc/mm/tlb_nohash_low.S b/arch/powerpc/mm/tlb_nohash_low.S
index 763c59fe0076..f900a39e6ec4 100644
--- a/arch/powerpc/mm/tlb_nohash_low.S
+++ b/arch/powerpc/mm/tlb_nohash_low.S
@@ -75,18 +75,19 @@ _GLOBAL(_tlbil_va)
 	mfspr	r5,SPRN_MMUCR
 	rlwimi	r5,r4,0,24,31			/* Set TID */
 
-	/* We have to run the search with interrupts disabled, even critical
-	 * and debug interrupts (in fact the only critical exceptions we have
-	 * are debug and machine check).  Otherwise an interrupt which causes
-	 * a TLB miss can clobber the MMUCR between the mtspr and the tlbsx. */
+	/* We have to run the search with interrupts disabled, otherwise
+	 * an interrupt which causes a TLB miss can clobber the MMUCR
+	 * between the mtspr and the tlbsx.
+	 *
+	 * Critical and Machine Check interrupts take care of saving
+	 * and restoring MMUCR, so only normal interrupts have to be
+	 * taken care of.
+	 */
 	mfmsr	r4
-	lis	r6,(MSR_EE|MSR_CE|MSR_ME|MSR_DE)@ha
-	addi	r6,r6,(MSR_EE|MSR_CE|MSR_ME|MSR_DE)@l
-	andc	r6,r4,r6
-	mtmsr	r6
+	wrteei	0
 	mtspr	SPRN_MMUCR,r5
 	tlbsx.	r3, 0, r3
-	mtmsr	r4
+	wrtee	r4
 	bne	1f
 	sync
 	/* There are only 64 TLB entries, so r3 < 64,