aboutsummaryrefslogtreecommitdiffstats
path: root/arch/sparc64/kernel/ktlb.S
diff options
context:
space:
mode:
authorDavid S. Miller <davem@davemloft.net>2006-02-22 01:31:11 -0500
committerDavid S. Miller <davem@sunset.davemloft.net>2006-03-20 04:13:56 -0500
commitd7744a09504d5ae84edc8289a02254e1f2102410 (patch)
treebe0f245ee0725f2f066bf87d17d254ce1e7279bf /arch/sparc64/kernel/ktlb.S
parent9cc3a1ac9a819cadff05ca37bb7f208013a22035 (diff)
[SPARC64]: Create a separate kernel TSB for 4MB/256MB mappings.
It can map all of the linear kernel mappings with zero TSB hash conflicts for systems with 16GB or less RAM. In such cases, on SUN4V, once we load up this TSB the first time with all the mappings, we never take a linear kernel mapping TLB miss ever again; the hypervisor handles them all. Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'arch/sparc64/kernel/ktlb.S')
-rw-r--r--arch/sparc64/kernel/ktlb.S15
1 files changed, 14 insertions, 1 deletions
diff --git a/arch/sparc64/kernel/ktlb.S b/arch/sparc64/kernel/ktlb.S
index ae1dac17bc8d..efcf38b6e284 100644
--- a/arch/sparc64/kernel/ktlb.S
+++ b/arch/sparc64/kernel/ktlb.S
@@ -121,6 +121,12 @@ kvmap_dtlb_obp:
121 nop 121 nop
122 122
123 .align 32 123 .align 32
124kvmap_dtlb_tsb4m_load:
125 KTSB_LOCK_TAG(%g1, %g2, %g7)
126 KTSB_WRITE(%g1, %g5, %g6)
127 ba,pt %xcc, kvmap_dtlb_load
128 nop
129
124kvmap_dtlb: 130kvmap_dtlb:
125 /* %g6: TAG TARGET */ 131 /* %g6: TAG TARGET */
126 mov TLB_TAG_ACCESS, %g4 132 mov TLB_TAG_ACCESS, %g4
@@ -133,6 +139,13 @@ kvmap_dtlb_4v:
133 brgez,pn %g4, kvmap_dtlb_nonlinear 139 brgez,pn %g4, kvmap_dtlb_nonlinear
134 nop 140 nop
135 141
142 /* Correct TAG_TARGET is already in %g6, check 4mb TSB. */
143 KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
144
145 /* TSB entry address left in %g1, lookup linear PTE.
146 * Must preserve %g1 and %g6 (TAG).
147 */
148kvmap_dtlb_tsb4m_miss:
136 sethi %hi(kpte_linear_bitmap), %g2 149 sethi %hi(kpte_linear_bitmap), %g2
137 or %g2, %lo(kpte_linear_bitmap), %g2 150 or %g2, %lo(kpte_linear_bitmap), %g2
138 151
@@ -163,7 +176,7 @@ kvmap_dtlb_4v:
163 176
164 .globl kvmap_linear_patch 177 .globl kvmap_linear_patch
165kvmap_linear_patch: 178kvmap_linear_patch:
166 ba,pt %xcc, kvmap_dtlb_load 179 ba,pt %xcc, kvmap_dtlb_tsb4m_load
167 xor %g2, %g4, %g5 180 xor %g2, %g4, %g5
168 181
169kvmap_dtlb_vmalloc_addr: 182kvmap_dtlb_vmalloc_addr: