diff options
Diffstat (limited to 'arch/sparc64/kernel/tsb.S')
-rw-r--r-- | arch/sparc64/kernel/tsb.S | 71 |
1 file changed, 70 insertions(+), 1 deletion(-)
diff --git a/arch/sparc64/kernel/tsb.S b/arch/sparc64/kernel/tsb.S
index d738910153f6..1b154c863628 100644
--- a/arch/sparc64/kernel/tsb.S
+++ b/arch/sparc64/kernel/tsb.S
@@ -34,8 +34,9 @@ tsb_miss_itlb:
 	ldxa		[%g4] ASI_IMMU, %g4
 
 	/* At this point we have:
-	 * %g4 -- missing virtual address
 	 * %g1 -- TSB entry address
+	 * %g3 -- FAULT_CODE_{D,I}TLB
+	 * %g4 -- missing virtual address
 	 * %g6 -- TAG TARGET (vaddr >> 22)
 	 */
 tsb_miss_page_table_walk:
@@ -45,6 +46,12 @@ tsb_miss_page_table_walk:
 tsb_miss_page_table_walk_sun4v_fastpath:
 	USER_PGTABLE_WALK_TL1(%g4, %g7, %g5, %g2, tsb_do_fault)
 
+	/* At this point we have:
+	 * %g1 -- TSB entry address
+	 * %g3 -- FAULT_CODE_{D,I}TLB
+	 * %g5 -- physical address of PTE in Linux page tables
+	 * %g6 -- TAG TARGET (vaddr >> 22)
+	 */
 tsb_reload:
 	TSB_LOCK_TAG(%g1, %g2, %g7)
 
@@ -199,6 +206,7 @@ __tsb_insert:
 	wrpr		%o5, %pstate
 	retl
 	 nop
+	.size		__tsb_insert, .-__tsb_insert
 
 	/* Flush the given TSB entry if it has the matching
 	 * tag.
@@ -208,6 +216,7 @@ __tsb_insert:
 	 */
 	.align		32
 	.globl		tsb_flush
+	.type		tsb_flush,#function
 tsb_flush:
 	sethi		%hi(TSB_TAG_LOCK_HIGH), %g2
 1:	TSB_LOAD_TAG(%o0, %g1)
@@ -225,6 +234,7 @@ tsb_flush:
 	 nop
 2:	retl
 	 TSB_MEMBAR
+	.size		tsb_flush, .-tsb_flush
 
 	/* Reload MMU related context switch state at
 	 * schedule() time.
@@ -241,6 +251,7 @@ tsb_flush:
 	 */
 	.align		32
 	.globl		__tsb_context_switch
+	.type		__tsb_context_switch,#function
 __tsb_context_switch:
 	rdpr		%pstate, %o5
 	wrpr		%o5, PSTATE_IE, %pstate
@@ -302,3 +313,61 @@ __tsb_context_switch:
 
 	retl
 	 nop
+	.size		__tsb_context_switch, .-__tsb_context_switch
+
+#define TSB_PASS_BITS		((1 << TSB_TAG_LOCK_BIT) | \
+				 (1 << TSB_TAG_INVALID_BIT))
+
+	.align		32
+	.globl		copy_tsb
+	.type		copy_tsb,#function
+copy_tsb:		/* %o0=old_tsb_base, %o1=old_tsb_size
+			 * %o2=new_tsb_base, %o3=new_tsb_size
+			 */
+	sethi		%uhi(TSB_PASS_BITS), %g7
+	srlx		%o3, 4, %o3
+	add		%o0, %o1, %g1	/* end of old tsb */
+	sllx		%g7, 32, %g7
+	sub		%o3, 1, %o3	/* %o3 == new tsb hash mask */
+
+661:	prefetcha	[%o0] ASI_N, #one_read
+	.section	.tsb_phys_patch, "ax"
+	.word		661b
+	prefetcha	[%o0] ASI_PHYS_USE_EC, #one_read
+	.previous
+
+90:	andcc		%o0, (64 - 1), %g0
+	bne		1f
+	 add		%o0, 64, %o5
+
+661:	prefetcha	[%o5] ASI_N, #one_read
+	.section	.tsb_phys_patch, "ax"
+	.word		661b
+	prefetcha	[%o5] ASI_PHYS_USE_EC, #one_read
+	.previous
+
+1:	TSB_LOAD_QUAD(%o0, %g2)		/* %g2/%g3 == TSB entry */
+	andcc		%g2, %g7, %g0	/* LOCK or INVALID set? */
+	bne,pn		%xcc, 80f	/* Skip it */
+	 sllx		%g2, 22, %o4	/* TAG --> VADDR */
+
+	/* This can definitely be computed faster... */
+	srlx		%o0, 4, %o5	/* Build index */
+	and		%o5, 511, %o5	/* Mask index */
+	sllx		%o5, PAGE_SHIFT, %o5 /* Put into vaddr position */
+	or		%o4, %o5, %o4	/* Full VADDR. */
+	srlx		%o4, PAGE_SHIFT, %o4 /* Shift down to create index */
+	and		%o4, %o3, %o4	/* Mask with new_tsb_nents-1 */
+	sllx		%o4, 4, %o4	/* Shift back up into tsb ent offset */
+	TSB_STORE(%o2 + %o4, %g2)	/* Store TAG */
+	add		%o4, 0x8, %o4	/* Advance to TTE */
+	TSB_STORE(%o2 + %o4, %g3)	/* Store TTE */
+
+80:	add		%o0, 16, %o0
+	cmp		%o0, %g1
+	bne,pt		%xcc, 90b
+	 nop
+
+	retl
+	 TSB_MEMBAR
+	.size		copy_tsb, .-copy_tsb