author		Kumar Gala <galak@kernel.crashing.org>	2009-08-24 11:52:48 -0400
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2009-08-28 00:24:12 -0400
commit		df5d6ecf8157245ef733db87597adb2c6e2510da (patch)
tree		9b1df8a76713d55ca08d11bd212281b11f6af652 /arch/powerpc/mm
parent		23e55f92d4fd733365dd572ea6e9e211387123c2 (diff)
powerpc/mm: Add MMU features for TLB reservation & Paired MAS registers
Support for TLB reservation (or TLB Write Conditional) and paired MAS registers is optional for a processor implementation, so we handle them via MMU feature sections.

We currently only use paired MAS registers to access the full RPN + perm bits that are kept in MAS7||MAS3. We assume that if an implementation has a hardware page table at this time, it also implements TLB reservations.

Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
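The feature-section macros used throughout the diff below emit two alternative instruction sequences and let the kernel patch in the appropriate one at boot, based on the CPU's advertised MMU feature bits. As a reading aid, this is the paired-MAS pattern the patch adds at each MAS7||MAS3 update site, annotated with editorial comments that are not part of the patch itself:

BEGIN_MMU_FTR_SECTION
	srdi	r16,r10,32		/* no paired MAS: split the value, high half for MAS7 */
	mtspr	SPRN_MAS3,r10		/* low word: RPN low bits + permission bits */
	mtspr	SPRN_MAS7,r16		/* high word: upper RPN bits */
MMU_FTR_SECTION_ELSE
	mtspr	SPRN_MAS7_MAS3,r10	/* paired MAS available: single 64-bit write */
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)

With the _IFCLR variant, the first sequence is used when MMU_FTR_USE_PAIRED_MAS is clear and the MMU_FTR_SECTION_ELSE sequence when it is set; the TLB-reservation hunks use the same macros with MMU_FTR_USE_TLBRSRV and the _IFSET sense.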
Diffstat (limited to 'arch/powerpc/mm')
-rw-r--r--	arch/powerpc/mm/tlb_low_64e.S	38
1 file changed, 37 insertions(+), 1 deletion(-)
diff --git a/arch/powerpc/mm/tlb_low_64e.S b/arch/powerpc/mm/tlb_low_64e.S
index cd92f62f9cf5..ef1cccf71173 100644
--- a/arch/powerpc/mm/tlb_low_64e.S
+++ b/arch/powerpc/mm/tlb_low_64e.S
@@ -189,12 +189,16 @@ normal_tlb_miss:
 	clrrdi	r14,r14,3
 	or	r10,r15,r14
 
+BEGIN_MMU_FTR_SECTION
 	/* Set the TLB reservation and seach for existing entry. Then load
 	 * the entry.
 	 */
 	PPC_TLBSRX_DOT(0,r16)
 	ld	r14,0(r10)
 	beq	normal_tlb_miss_done
+MMU_FTR_SECTION_ELSE
+	ld	r14,0(r10)
+ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBRSRV)
 
 finish_normal_tlb_miss:
 	/* Check if required permissions are met */
@@ -241,7 +245,14 @@ finish_normal_tlb_miss:
 	bne	1f
 	li	r11,MAS3_SW|MAS3_UW
 	andc	r15,r15,r11
-1:	mtspr	SPRN_MAS7_MAS3,r15
+1:
+BEGIN_MMU_FTR_SECTION
+	srdi	r16,r15,32
+	mtspr	SPRN_MAS3,r15
+	mtspr	SPRN_MAS7,r16
+MMU_FTR_SECTION_ELSE
+	mtspr	SPRN_MAS7_MAS3,r15
+ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
 	tlbwe
 
@@ -311,11 +322,13 @@ virt_page_table_tlb_miss:
 	rlwinm	r10,r10,0,16,1			/* Clear TID */
 	mtspr	SPRN_MAS1,r10
 1:
+BEGIN_MMU_FTR_SECTION
 	/* Search if we already have a TLB entry for that virtual address, and
 	 * if we do, bail out.
 	 */
 	PPC_TLBSRX_DOT(0,r16)
 	beq	virt_page_table_tlb_miss_done
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_TLBRSRV)
 
 	/* Now, we need to walk the page tables. First check if we are in
 	 * range.
@@ -367,10 +380,18 @@ virt_page_table_tlb_miss:
 	 */
 	clrldi	r11,r15,4		/* remove region ID from RPN */
 	ori	r10,r11,1		/* Or-in SR */
+
+BEGIN_MMU_FTR_SECTION
+	srdi	r16,r10,32
+	mtspr	SPRN_MAS3,r10
+	mtspr	SPRN_MAS7,r16
+MMU_FTR_SECTION_ELSE
 	mtspr	SPRN_MAS7_MAS3,r10
+ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
 	tlbwe
 
+BEGIN_MMU_FTR_SECTION
 virt_page_table_tlb_miss_done:
 
 	/* We have overriden MAS2:EPN but currently our primary TLB miss
@@ -394,6 +415,7 @@ virt_page_table_tlb_miss_done:
 	addi	r10,r11,-4
 	std	r10,PACA_EXTLB+EX_TLB_SIZE+EX_TLB_SRR0(r13)
 1:
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_TLBRSRV)
 	/* Return to caller, normal case */
 	TLB_MISS_STATS_X(MMSTAT_TLB_MISS_PT_OK);
 	TLB_MISS_EPILOG_SUCCESS
@@ -618,7 +640,14 @@ htw_tlb_miss:
 #else
 	ori	r10,r15,(BOOK3E_PAGESZ_4K << MAS3_SPSIZE_SHIFT)
 #endif
+
+BEGIN_MMU_FTR_SECTION
+	srdi	r16,r10,32
+	mtspr	SPRN_MAS3,r10
+	mtspr	SPRN_MAS7,r16
+MMU_FTR_SECTION_ELSE
 	mtspr	SPRN_MAS7_MAS3,r10
+ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
 	tlbwe
 
@@ -700,7 +729,14 @@ tlb_load_linear:
 	clrrdi	r10,r16,30		/* 1G page index */
 	clrldi	r10,r10,4		/* clear region bits */
 	ori	r10,r10,MAS3_SR|MAS3_SW|MAS3_SX
+
+BEGIN_MMU_FTR_SECTION
+	srdi	r16,r10,32
+	mtspr	SPRN_MAS3,r10
+	mtspr	SPRN_MAS7,r16
+MMU_FTR_SECTION_ELSE
 	mtspr	SPRN_MAS7_MAS3,r10
+ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_PAIRED_MAS)
 
 	tlbwe
 