Diffstat (limited to 'arch/ppc/kernel/misc.S')
 arch/ppc/kernel/misc.S | 22 +++++++++++++++-------
 1 file changed, 15 insertions(+), 7 deletions(-)
diff --git a/arch/ppc/kernel/misc.S b/arch/ppc/kernel/misc.S
index a22e1f4d94c8..2b81e71d6b2d 100644
--- a/arch/ppc/kernel/misc.S
+++ b/arch/ppc/kernel/misc.S
@@ -224,7 +224,16 @@ _GLOBAL(_tlbia)
  */
 _GLOBAL(_tlbie)
 #if defined(CONFIG_40x)
+	/* We run the search with interrupts disabled because we have to change
+	 * the PID and I don't want to preempt when that happens.
+	 */
+	mfmsr	r5
+	mfspr	r6,SPRN_PID
+	wrteei	0
+	mtspr	SPRN_PID,r4
 	tlbsx.	r3, 0, r3
+	mtspr	SPRN_PID,r6
+	wrtee	r5
 	bne	10f
 	sync
 	/* There are only 64 TLB entries, so r3 < 64, which means bit 25 is clear.
@@ -234,22 +243,21 @@ _GLOBAL(_tlbie)
 	isync
 10:
 #elif defined(CONFIG_44x)
-	mfspr	r4,SPRN_MMUCR
-	mfspr	r5,SPRN_PID			/* Get PID */
-	rlwimi	r4,r5,0,24,31			/* Set TID */
+	mfspr	r5,SPRN_MMUCR
+	rlwimi	r5,r4,0,24,31			/* Set TID */
 
 	/* We have to run the search with interrupts disabled, even critical
 	 * and debug interrupts (in fact the only critical exceptions we have
 	 * are debug and machine check).  Otherwise an interrupt which causes
 	 * a TLB miss can clobber the MMUCR between the mtspr and the tlbsx. */
-	mfmsr	r5
+	mfmsr	r4
 	lis	r6,(MSR_EE|MSR_CE|MSR_ME|MSR_DE)@ha
 	addi	r6,r6,(MSR_EE|MSR_CE|MSR_ME|MSR_DE)@l
-	andc	r6,r5,r6
+	andc	r6,r4,r6
 	mtmsr	r6
-	mtspr	SPRN_MMUCR,r4
+	mtspr	SPRN_MMUCR,r5
 	tlbsx.	r3, 0, r3
-	mtmsr	r5
+	mtmsr	r4
 	bne	10f
 	sync
 	/* There are only 64 TLB entries, so r3 < 64,
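
For readers who find the PowerPC assembly hard to follow, here is a rough C-level sketch of what the new CONFIG_40x path does: save the MSR and the current PID, disable external interrupts, switch to the PID passed in by the caller (r4), run the TLB search, then restore both the PID and the MSR. The names read_msr, write_msr, read_pid, write_pid, tlb_search and tlbie_40x_sketch are hypothetical stand-ins for the mfmsr/wrteei/mfspr/mtspr/tlbsx. instructions in the patch, not kernel APIs.

	/* Hypothetical helpers standing in for the instructions in the patch. */
	extern unsigned long read_msr(void);        /* mfmsr                        */
	extern void write_msr(unsigned long msr);   /* mtmsr / wrtee                */
	extern unsigned int read_pid(void);         /* mfspr  rN,SPRN_PID           */
	extern void write_pid(unsigned int pid);    /* mtspr  SPRN_PID,rN           */
	extern int tlb_search(unsigned long ea);    /* tlbsx. (negative = no match) */

	#define MSR_EE	(1UL << 15)                 /* external interrupt enable */

	/* 40x: search for 'ea' in address space 'pid' with external interrupts
	 * off, so nothing can run while the PID register is temporarily swapped. */
	static int tlbie_40x_sketch(unsigned long ea, unsigned int pid)
	{
		unsigned long msr = read_msr();         /* mfmsr  r5          */
		unsigned int saved_pid = read_pid();    /* mfspr  r6,SPRN_PID */
		int entry;

		write_msr(msr & ~MSR_EE);               /* wrteei 0           */
		write_pid(pid);                         /* mtspr  SPRN_PID,r4 */
		entry = tlb_search(ea);                 /* tlbsx. r3, 0, r3   */
		write_pid(saved_pid);                   /* mtspr  SPRN_PID,r6 */
		write_msr(msr);                         /* wrtee  r5 (restores EE) */

		return entry;                           /* entry is invalidated only if found */
	}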
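
The CONFIG_44x path follows the same pattern, except that the PID goes into the low byte of MMUCR (the rlwimi r5,r4,0,24,31 above) and, as the comment in the patch explains, even critical and debug interrupts have to be masked so a TLB miss cannot clobber MMUCR between the mtspr and the tlbsx. The sketch below reuses the hypothetical helpers from the 40x sketch and adds read_mmucr/write_mmucr; the MSR bit positions are assumed values for 32-bit Book E parts.

	extern unsigned long read_mmucr(void);       /* mfspr  rN,SPRN_MMUCR */
	extern void write_mmucr(unsigned long v);    /* mtspr  SPRN_MMUCR,rN */

	/* Assumed Book E MSR bit positions for the mask built with lis/addi. */
	#define MSR_CE	(1UL << 17)                  /* critical interrupt enable */
	#define MSR_ME	(1UL << 12)                  /* machine check enable      */
	#define MSR_DE	(1UL << 9)                   /* debug interrupt enable    */

	/* 44x: put the PID into the MMUCR TID field and run the search with
	 * external, critical, machine check and debug interrupts all disabled. */
	static int tlbie_44x_sketch(unsigned long ea, unsigned int pid)
	{
		unsigned long mmucr = (read_mmucr() & ~0xffUL) | (pid & 0xff);
		unsigned long msr = read_msr();          /* mfmsr  r4            */
		int entry;

		write_msr(msr & ~(MSR_EE | MSR_CE | MSR_ME | MSR_DE));
		write_mmucr(mmucr);                      /* mtspr  SPRN_MMUCR,r5 */
		entry = tlb_search(ea);                  /* tlbsx. r3, 0, r3     */
		write_msr(msr);                          /* mtmsr  r4            */

		return entry;
	}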