Diffstat (limited to 'arch/sparc/kernel/ktlb.S')
-rw-r--r--	arch/sparc/kernel/ktlb.S | 125
1 file changed, 15 insertions(+), 110 deletions(-)
diff --git a/arch/sparc/kernel/ktlb.S b/arch/sparc/kernel/ktlb.S
index 605d49204580..ef0d8e9e1210 100644
--- a/arch/sparc/kernel/ktlb.S
+++ b/arch/sparc/kernel/ktlb.S
@@ -47,14 +47,6 @@ kvmap_itlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
 
 	TSB_LOCK_TAG(%g1, %g2, %g7)
-
-	/* Load and check PTE.  */
-	ldxa	[%g5] ASI_PHYS_USE_EC, %g5
-	mov	1, %g7
-	sllx	%g7, TSB_TAG_INVALID_BIT, %g7
-	brgez,a,pn	%g5, kvmap_itlb_longpath
-	 TSB_STORE(%g1, %g7)
-
 	TSB_WRITE(%g1, %g5, %g6)
 
 	/* fallthrough to TLB load */
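The block dropped from the ITLB path above (and, later in this diff, the identical block in the DTLB path) re-loaded the PTE through ASI_PHYS_USE_EC after taking the TSB lock and, if the PTE had gone invalid in the meantime, unlocked the entry with an invalid-tag store before bailing to the long path. After this change the PTE left in %g5 by the page table walk is written as-is. A loose C sketch of what the deleted guard did; the function and parameter names are invented for illustration:

#include <stdint.h>

#define TSB_TAG_INVALID_BIT	46	/* from the sparc64 tsb.h of this era */

/* Hypothetical rendering of the deleted re-check.  pte_phys points at
 * the PTE in physical memory, tsb_ent at the locked TSB entry
 * (tag word at index 0, TTE at index 1).
 */
static int recheck_pte_and_write(uint64_t *tsb_ent, const uint64_t *pte_phys,
				 uint64_t tag)
{
	uint64_t pte = *pte_phys;	/* ldxa [%g5] ASI_PHYS_USE_EC, %g5 */

	if ((int64_t)pte >= 0) {	/* brgez: valid bit (63) is clear */
		/* TSB_STORE: unlock the entry by marking it invalid */
		tsb_ent[0] = 1UL << TSB_TAG_INVALID_BIT;
		return 0;		/* -> kvmap_{i,d}tlb_longpath */
	}
	tsb_ent[1] = pte;		/* TSB_WRITE: data first ...	  */
	tsb_ent[0] = tag;		/* ... then the tag, unlocking it */
	return 1;
}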
@@ -118,6 +110,12 @@ kvmap_dtlb_obp:
 	ba,pt	%xcc, kvmap_dtlb_load
 	 nop
 
+kvmap_linear_early:
+	sethi	%hi(kern_linear_pte_xor), %g7
+	ldx	[%g7 + %lo(kern_linear_pte_xor)], %g2
+	ba,pt	%xcc, kvmap_dtlb_tsb4m_load
+	 xor	%g2, %g4, %g5
+
 	.align	32
 kvmap_dtlb_tsb4m_load:
 	TSB_LOCK_TAG(%g1, %g2, %g7)
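The new kvmap_linear_early stub forms a linear-mapping PTE by XORing the miss address with kern_linear_pte_xor[0], whose boot-time value folds the PAGE_OFFSET stripping and the PTE attribute bits into a single constant, then branches into the existing 4MB TSB load path. A one-line C sketch of that computation; the helper name is invented:

extern unsigned long kern_linear_pte_xor[4];	/* set up at boot */

/* Hypothetical equivalent of kvmap_linear_early:
 *   %g2 = kern_linear_pte_xor[0];  %g5 = %g2 ^ %g4 (the miss address)
 */
static unsigned long linear_pte_early(unsigned long vaddr)
{
	return kern_linear_pte_xor[0] ^ vaddr;
}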
@@ -146,105 +144,17 @@ kvmap_dtlb_4v:
 	/* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
 	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
 #endif
-	/* TSB entry address left in %g1, lookup linear PTE.
-	 * Must preserve %g1 and %g6 (TAG).
-	 */
-kvmap_dtlb_tsb4m_miss:
-	/* Clear the PAGE_OFFSET top virtual bits, shift
-	 * down to get PFN, and make sure PFN is in range.
-	 */
-661:	sllx	%g4, 0, %g5
-	.section	.page_offset_shift_patch, "ax"
-	.word	661b
-	.previous
-
-	/* Check to see if we know about valid memory at the 4MB
-	 * chunk this physical address will reside within.
-	 */
-661:	srlx	%g5, MAX_PHYS_ADDRESS_BITS, %g2
-	.section	.page_offset_shift_patch, "ax"
-	.word	661b
-	.previous
-
-	brnz,pn	%g2, kvmap_dtlb_longpath
-	 nop
-
-	/* This unconditional branch and delay-slot nop gets patched
-	 * by the sethi sequence once the bitmap is properly setup.
-	 */
-	.globl	valid_addr_bitmap_insn
-valid_addr_bitmap_insn:
-	ba,pt	%xcc, 2f
-	 nop
-	.subsection	2
-	.globl	valid_addr_bitmap_patch
-valid_addr_bitmap_patch:
-	sethi	%hi(sparc64_valid_addr_bitmap), %g7
-	or	%g7, %lo(sparc64_valid_addr_bitmap), %g7
-	.previous
-
-661:	srlx	%g5, ILOG2_4MB, %g2
-	.section	.page_offset_shift_patch, "ax"
-	.word	661b
-	.previous
-
-	srlx	%g2, 6, %g5
-	and	%g2, 63, %g2
-	sllx	%g5, 3, %g5
-	ldx	[%g7 + %g5], %g5
-	mov	1, %g7
-	sllx	%g7, %g2, %g7
-	andcc	%g5, %g7, %g0
-	be,pn	%xcc, kvmap_dtlb_longpath
-
-2:	 sethi	%hi(kpte_linear_bitmap), %g2
-
-	/* Get the 256MB physical address index. */
-661:	sllx	%g4, 0, %g5
-	.section	.page_offset_shift_patch, "ax"
-	.word	661b
-	.previous
-
-	or	%g2, %lo(kpte_linear_bitmap), %g2
-
-661:	srlx	%g5, ILOG2_256MB, %g5
-	.section	.page_offset_shift_patch, "ax"
-	.word	661b
-	.previous
-
-	and	%g5, (32 - 1), %g7
-
-	/* Divide by 32 to get the offset into the bitmask.  */
-	srlx	%g5, 5, %g5
-	add	%g7, %g7, %g7
-	sllx	%g5, 3, %g5
-
-	/* kern_linear_pte_xor[(mask >> shift) & 3)] */
-	ldx	[%g2 + %g5], %g2
-	srlx	%g2, %g7, %g7
-	sethi	%hi(kern_linear_pte_xor), %g5
-	and	%g7, 3, %g7
-	or	%g5, %lo(kern_linear_pte_xor), %g5
-	sllx	%g7, 3, %g7
-	ldx	[%g5 + %g7], %g2
-
+	/* Linear mapping TSB lookup failed.  Fallthrough to kernel
+	 * page table based lookup.
+	 */
 	.globl	kvmap_linear_patch
 kvmap_linear_patch:
-	ba,pt	%xcc, kvmap_dtlb_tsb4m_load
-	 xor	%g2, %g4, %g5
+	ba,a,pt	%xcc, kvmap_linear_early
 
 kvmap_dtlb_vmalloc_addr:
 	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
 
 	TSB_LOCK_TAG(%g1, %g2, %g7)
-
-	/* Load and check PTE.  */
-	ldxa	[%g5] ASI_PHYS_USE_EC, %g5
-	mov	1, %g7
-	sllx	%g7, TSB_TAG_INVALID_BIT, %g7
-	brgez,a,pn	%g5, kvmap_dtlb_longpath
-	 TSB_STORE(%g1, %g7)
-
 	TSB_WRITE(%g1, %g5, %g6)
 
 	/* fallthrough to TLB load */
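For reference, the long block deleted in this hunk computed the linear PTE by hand: strip the PAGE_OFFSET bits (via the patchable 661: sequences), range-check the PFN, test the 4MB-granular sparc64_valid_addr_bitmap, then pick one of four page-size variants of kern_linear_pte_xor out of the 2-bits-per-256MB kpte_linear_bitmap. All of that is replaced by falling through to the kernel page table walk, with kvmap_linear_patch branching to the simple kvmap_linear_early until boot code patches it. A C sketch of the deleted logic; the helper and its pre-stripped paddr argument are invented, and MAX_PHYS_ADDRESS_BITS is assumed to be the pre-patch value:

#define ILOG2_4MB		22	/* log2(4MB)   */
#define ILOG2_256MB		28	/* log2(256MB) */
#define MAX_PHYS_ADDRESS_BITS	47	/* assumption: pre-patch value */

extern unsigned long sparc64_valid_addr_bitmap[];
extern unsigned long kpte_linear_bitmap[];
extern unsigned long kern_linear_pte_xor[4];

/* Hypothetical rendering of the deleted kvmap_dtlb_tsb4m_miss path;
 * paddr is the miss address with the PAGE_OFFSET bits already cleared.
 * Returns 0 where the assembler branched to kvmap_dtlb_longpath.
 */
static unsigned long old_linear_pte(unsigned long vaddr, unsigned long paddr)
{
	unsigned long chunk = paddr >> ILOG2_4MB;	/* 4MB chunk index   */
	unsigned long idx = paddr >> ILOG2_256MB;	/* 256MB chunk index */
	unsigned long sel;

	if (paddr >> MAX_PHYS_ADDRESS_BITS)		/* brnz: PFN range   */
		return 0;

	/* one valid bit per 4MB chunk of known physical memory */
	if (!((sparc64_valid_addr_bitmap[chunk >> 6] >> (chunk & 63)) & 1))
		return 0;

	/* kern_linear_pte_xor[(mask >> shift) & 3]: two bits per 256MB
	 * chunk select the page-size variant of the xor constant.
	 */
	sel = (kpte_linear_bitmap[idx >> 5] >> ((idx & 31) * 2)) & 3;
	return kern_linear_pte_xor[sel] ^ vaddr;
}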
@@ -276,13 +186,8 @@ kvmap_dtlb_load:
 
 #ifdef CONFIG_SPARSEMEM_VMEMMAP
 kvmap_vmemmap:
-	sub	%g4, %g5, %g5
-	srlx	%g5, ILOG2_4MB, %g5
-	sethi	%hi(vmemmap_table), %g1
-	sllx	%g5, 3, %g5
-	or	%g1, %lo(vmemmap_table), %g1
-	ba,pt	%xcc, kvmap_dtlb_load
-	 ldx	[%g1 + %g5], %g5
+	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
+	ba,a,pt	%xcc, kvmap_dtlb_load
 #endif
 
 kvmap_dtlb_nonlinear:
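The vmemmap miss handler likewise stops consulting a flat table of preloaded 4MB TTEs and just walks the kernel page tables. A C sketch of the lookup being removed; the helper name and the explicit base argument are invented:

#define ILOG2_4MB	22	/* log2(4MB) */

extern unsigned long vmemmap_table[];

/* Hypothetical equivalent of the deleted kvmap_vmemmap sequence:
 * index the TTE table by the 4MB-granular offset into the vmemmap area.
 */
static unsigned long old_vmemmap_tte(unsigned long vaddr,
				     unsigned long vmemmap_base)
{
	return vmemmap_table[(vaddr - vmemmap_base) >> ILOG2_4MB];
}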
@@ -294,8 +199,8 @@ kvmap_dtlb_nonlinear:
 
 #ifdef CONFIG_SPARSEMEM_VMEMMAP
 	/* Do not use the TSB for vmemmap.  */
-	mov	(VMEMMAP_BASE >> 40), %g5
-	sllx	%g5, 40, %g5
+	sethi	%hi(VMEMMAP_BASE), %g5
+	ldx	[%g5 + %lo(VMEMMAP_BASE)], %g5
 	cmp	%g4,%g5
 	bgeu,pn	%xcc, kvmap_vmemmap
 	 nop
@@ -307,8 +212,8 @@ kvmap_dtlb_tsbmiss:
 	sethi	%hi(MODULES_VADDR), %g5
 	cmp	%g4, %g5
 	blu,pn	%xcc, kvmap_dtlb_longpath
-	 mov	(VMALLOC_END >> 40), %g5
-	sllx	%g5, 40, %g5
+	 sethi	%hi(VMALLOC_END), %g5
+	ldx	[%g5 + %lo(VMALLOC_END)], %g5
 	cmp	%g4, %g5
 	bgeu,pn	%xcc, kvmap_dtlb_longpath
 	 nop
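The last two hunks change shape the same way: VMEMMAP_BASE and VMALLOC_END used to be materialized as immediates at assembly time (mov of the value shifted right by 40, then sllx back), but are now ordinary variables loaded with a sethi/ldx pair, since those boundaries are now sized at boot from the machine's supported physical address bits. A C sketch of the resulting kvmap_dtlb_tsbmiss range check; the predicate is invented, and the MODULES_VADDR value is an assumption taken from the sparc64 headers:

#define MODULES_VADDR	0x0000000010000000UL	/* assumption */

extern unsigned long VMALLOC_END;	/* now set at boot, not a constant */

/* Hypothetical predicate mirroring kvmap_dtlb_tsbmiss: the miss address
 * must fall in [MODULES_VADDR, VMALLOC_END) or we take the long path.
 */
static int vaddr_in_vmalloc_window(unsigned long vaddr)
{
	return vaddr >= MODULES_VADDR && vaddr < VMALLOC_END;
}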