author		Michael Neuling <mikey@neuling.org>	2012-06-25 09:33:25 -0400
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2012-07-10 05:18:35 -0400
commit		962cffbd8a21ad380ec71a6f5ea55a8e08f32dd1 (patch)
tree		378842a721bfd35cc54bd94e1a26d499c6fb43ac /arch/powerpc/mm
parent		f4c015795c74ec31b7ad0b8e11d07946fe853db4 (diff)
powerpc: Enforce usage of RA 0-R31 where possible
Some macros use RA where, when RA=R0, the value is 0, so make 0 the
enforced mnemonic in those macros.
Idea suggested by Andreas Schwab.
Signed-off-by: Michael Neuling <mikey@neuling.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
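For context: the enforcement itself happens in arch/powerpc/include/asm/ppc-opcode.h, which lies outside this diffstat (limited to arch/powerpc/mm). Below is a minimal sketch of the token-pasting trick, with macro names modelled on the kernel's but reconstructed from memory rather than taken from this commit:

	/* ___PPC_RA() places a raw 5-bit value into the RA field of an
	 * instruction word.
	 */
	#define ___PPC_RA(a)	(((a) & 0x1f) << 16)

	/* For instructions where RA=0 means "the literal value 0" rather
	 * than "the contents of r0", only the mnemonics 0 and R1..R31 get
	 * a __REGA0_ expansion.  __REGA0_R0 is deliberately left
	 * undefined, so PPC_TLBSRX_DOT(R0,R16) now fails to build while
	 * PPC_TLBSRX_DOT(0,R16) assembles cleanly.
	 */
	#define __REGA0_0	0
	#define __REGA0_R1	1
	#define __REGA0_R2	2
	/* ... R3 through R30 elided ... */
	#define __REGA0_R31	31

	#define __PPC_RA0(a)	___PPC_RA(__REGA0_##a)

A build failure, rather than a silently wrong encoding, is the point: writing R0 where the hardware reads the field as a literal zero is almost always a mistake.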
Diffstat (limited to 'arch/powerpc/mm')
-rw-r--r--	arch/powerpc/mm/tlb_low_64e.S	10
-rw-r--r--	arch/powerpc/mm/tlb_nohash_low.S	16
2 files changed, 13 insertions, 13 deletions
diff --git a/arch/powerpc/mm/tlb_low_64e.S b/arch/powerpc/mm/tlb_low_64e.S
index 4b9e2643d21b..f09d48e3268d 100644
--- a/arch/powerpc/mm/tlb_low_64e.S
+++ b/arch/powerpc/mm/tlb_low_64e.S
@@ -126,7 +126,7 @@ BEGIN_MMU_FTR_SECTION
 	/* Set the TLB reservation and search for existing entry. Then load
 	 * the entry.
	 */
-	PPC_TLBSRX_DOT(R0,R16)
+	PPC_TLBSRX_DOT(0,R16)
 	ldx	r14,r14,r15	/* grab pgd entry */
 	beq	normal_tlb_miss_done	/* tlb exists already, bail */
 MMU_FTR_SECTION_ELSE
@@ -395,7 +395,7 @@ BEGIN_MMU_FTR_SECTION
 	/* Set the TLB reservation and search for existing entry. Then load
 	 * the entry.
	 */
-	PPC_TLBSRX_DOT(R0,R16)
+	PPC_TLBSRX_DOT(0,R16)
 	ld	r14,0(r10)
 	beq	normal_tlb_miss_done
 MMU_FTR_SECTION_ELSE
@@ -528,7 +528,7 @@ BEGIN_MMU_FTR_SECTION
 	/* Search if we already have a TLB entry for that virtual address, and
 	 * if we do, bail out.
	 */
-	PPC_TLBSRX_DOT(R0,R16)
+	PPC_TLBSRX_DOT(0,R16)
 	beq	virt_page_table_tlb_miss_done
 END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_TLBRSRV)
 
@@ -779,7 +779,7 @@ htw_tlb_miss:
 	 *
 	 * MAS1:IND should be already set based on MAS4
	 */
-	PPC_TLBSRX_DOT(R0,R16)
+	PPC_TLBSRX_DOT(0,R16)
 	beq	htw_tlb_miss_done
 
 	/* Now, we need to walk the page tables. First check if we are in
@@ -919,7 +919,7 @@ tlb_load_linear:
 	mtspr	SPRN_MAS1,r15
 
 	/* Already somebody there ? */
-	PPC_TLBSRX_DOT(R0,R16)
+	PPC_TLBSRX_DOT(0,R16)
 	beq	tlb_load_linear_done
 
 	/* Now we build the remaining MAS. MAS0 and 2 should be fine
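For reference, roughly what a converted call site like PPC_TLBSRX_DOT(0,R16) above expands to; the opcode constant and helper names below are quoted from memory, so treat this as a sketch rather than the header's exact contents:

	/* tlbsrx. historically lacked an assembler mnemonic, so the kernel
	 * emits it as a raw instruction word.  The RA argument goes
	 * through __PPC_RA0(), which is what rejects R0 at build time.
	 */
	#define PPC_INST_TLBSRX_DOT	0x7c0006a5	/* tlbsrx. */
	#define ___PPC_RB(b)		(((b) & 0x1f) << 11)
	#define __PPC_RB(b)		___PPC_RB(__REG_##b)

	#define PPC_TLBSRX_DOT(a, b)	stringify_in_c(.long PPC_INST_TLBSRX_DOT | \
						__PPC_RA0(a) | __PPC_RB(b))

PPC_TLBSRX_DOT(0,R16) therefore assembles to .long 0x7c0006a5 | (0 << 16) | (16 << 11): a tlbsrx. whose RA field is zero, so the TLB search takes its effective address from r16 alone.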
diff --git a/arch/powerpc/mm/tlb_nohash_low.S b/arch/powerpc/mm/tlb_nohash_low.S
index 75a9d14a3dea..fab919fd1384 100644
--- a/arch/powerpc/mm/tlb_nohash_low.S
+++ b/arch/powerpc/mm/tlb_nohash_low.S
@@ -266,7 +266,7 @@ BEGIN_MMU_FTR_SECTION
 	andi.	r3,r3,MMUCSR0_TLBFI@l
 	bne	1b
 MMU_FTR_SECTION_ELSE
-	PPC_TLBILX_ALL(R0,R0)
+	PPC_TLBILX_ALL(0,R0)
 ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
 	msync
 	isync
@@ -279,7 +279,7 @@ BEGIN_MMU_FTR_SECTION
 	wrteei	0
 	mfspr	r4,SPRN_MAS6	/* save MAS6 */
 	mtspr	SPRN_MAS6,r3
-	PPC_TLBILX_PID(R0,R0)
+	PPC_TLBILX_PID(0,R0)
 	mtspr	SPRN_MAS6,r4	/* restore MAS6 */
 	wrtee	r10
 MMU_FTR_SECTION_ELSE
@@ -313,7 +313,7 @@ BEGIN_MMU_FTR_SECTION
 	mtspr	SPRN_MAS1,r4
 	tlbwe
 MMU_FTR_SECTION_ELSE
-	PPC_TLBILX_VA(R0,R3)
+	PPC_TLBILX_VA(0,R3)
 ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
 	msync
 	isync
@@ -331,7 +331,7 @@ _GLOBAL(_tlbil_pid)
 	mfmsr	r10
 	wrteei	0
 	mtspr	SPRN_MAS6,r4
-	PPC_TLBILX_PID(R0,R0)
+	PPC_TLBILX_PID(0,R0)
 	wrtee	r10
 	msync
 	isync
@@ -343,14 +343,14 @@ _GLOBAL(_tlbil_pid_noind)
 	ori	r4,r4,MAS6_SIND
 	wrteei	0
 	mtspr	SPRN_MAS6,r4
-	PPC_TLBILX_PID(R0,R0)
+	PPC_TLBILX_PID(0,R0)
 	wrtee	r10
 	msync
 	isync
 	blr
 
 _GLOBAL(_tlbil_all)
-	PPC_TLBILX_ALL(R0,R0)
+	PPC_TLBILX_ALL(0,R0)
 	msync
 	isync
 	blr
@@ -364,7 +364,7 @@ _GLOBAL(_tlbil_va)
 	beq	1f
 	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
 1:	mtspr	SPRN_MAS6,r4	/* assume AS=0 for now */
-	PPC_TLBILX_VA(R0,R3)
+	PPC_TLBILX_VA(0,R3)
 	msync
 	isync
 	wrtee	r10
@@ -379,7 +379,7 @@ _GLOBAL(_tlbivax_bcast)
 	beq	1f
 	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4	/* assume AS=0 for now */
-	PPC_TLBIVAX(R0,R3)
+	PPC_TLBIVAX(0,R3)
 	eieio
 	tlbsync
 	sync