author	David S. Miller <davem@davemloft.net>	2005-08-30 23:21:34 -0400
committer	David S. Miller <davem@davemloft.net>	2005-08-30 23:21:34 -0400
commit	2ef27778a26dd828dd0d348ff12d2c180062746e (patch)
tree	83a7dc19bc79b6b7abde3387db469bcf5daa9a05 /arch
parent	3c2cafaf50a0f9e7efe2b3f584f3bba6c5ee929a (diff)
[SPARC64]: Preserve nucleus ctx page size during TLB flushes.
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'arch')
-rw-r--r--	arch/sparc64/mm/ultra.S	39
1 file changed, 25 insertions, 14 deletions
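
Note (not part of the patch): every srlx/sllx/or sequence added below performs the same masking step on the saved PRIMARY_CONTEXT value. A minimal C sketch of that step follows; the CTX_PGSZ1_NUC_SHIFT value and the helper name are assumptions used only for illustration.

/*
 * Illustrative sketch only: keep the nucleus page-size fields that
 * live at or above CTX_PGSZ1_NUC_SHIFT in the old context register
 * value, and merge the new context number into the low bits before
 * the register is rewritten for the demap.
 */
#include <stdint.h>

#define CTX_PGSZ1_NUC_SHIFT	58	/* assumed value, see asm/mmu.h */

static uint64_t ctx_with_nucleus_pgsz(uint64_t saved_ctx_reg, uint64_t new_ctx)
{
	/* srlx + sllx: clear everything below the nucleus page-size fields */
	uint64_t pgsz = (saved_ctx_reg >> CTX_PGSZ1_NUC_SHIFT) << CTX_PGSZ1_NUC_SHIFT;

	/* or: install the new context number alongside them */
	return new_ctx | pgsz;
}

The shift-right/shift-left pair is how the assembly clears the low bits without loading a separate mask constant.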
diff --git a/arch/sparc64/mm/ultra.S b/arch/sparc64/mm/ultra.S
index 363770893797..8dfa825eca51 100644
--- a/arch/sparc64/mm/ultra.S
+++ b/arch/sparc64/mm/ultra.S
@@ -10,6 +10,7 @@
 #include <asm/page.h>
 #include <asm/spitfire.h>
 #include <asm/mmu_context.h>
+#include <asm/mmu.h>
 #include <asm/pil.h>
 #include <asm/head.h>
 #include <asm/thread_info.h>
@@ -45,6 +46,8 @@ __flush_tlb_mm: /* %o0=(ctx & TAG_CONTEXT_BITS), %o1=SECONDARY_CONTEXT */
 	nop
 	nop
 	nop
+	nop
+	nop
 
 	.align		32
 	.globl		__flush_tlb_pending
@@ -73,6 +76,9 @@ __flush_tlb_pending:
 	retl
 	wrpr		%g7, 0x0, %pstate
 	nop
+	nop
+	nop
+	nop
 
 	.align		32
 	.globl		__flush_tlb_kernel_range
@@ -224,16 +230,8 @@ __update_mmu_cache: /* %o0=hw_context, %o1=address, %o2=pte, %o3=fault_code */
 	or		%o5, %o0, %o5
 	ba,a,pt		%xcc, __prefill_itlb
 
-	/* Cheetah specific versions, patched at boot time.
-	 *
-	 * This writes of the PRIMARY_CONTEXT register in this file are
-	 * safe even on Cheetah+ and later wrt. the page size fields.
-	 * The nucleus page size fields do not matter because we make
-	 * no data references, and these instructions execute out of a
-	 * locked I-TLB entry sitting in the fully assosciative I-TLB.
-	 * This sequence should also never trap.
-	 */
-__cheetah_flush_tlb_mm: /* 15 insns */
+	/* Cheetah specific versions, patched at boot time. */
+__cheetah_flush_tlb_mm: /* 18 insns */
 	rdpr		%pstate, %g7
 	andn		%g7, PSTATE_IE, %g2
 	wrpr		%g2, 0x0, %pstate
@@ -241,6 +239,9 @@ __cheetah_flush_tlb_mm: /* 15 insns */
 	mov		PRIMARY_CONTEXT, %o2
 	mov		0x40, %g3
 	ldxa		[%o2] ASI_DMMU, %g2
+	srlx		%g2, CTX_PGSZ1_NUC_SHIFT, %o1
+	sllx		%o1, CTX_PGSZ1_NUC_SHIFT, %o1
+	or		%o0, %o1, %o0	/* Preserve nucleus page size fields */
 	stxa		%o0, [%o2] ASI_DMMU
 	stxa		%g0, [%g3] ASI_DMMU_DEMAP
 	stxa		%g0, [%g3] ASI_IMMU_DEMAP
@@ -250,7 +251,7 @@ __cheetah_flush_tlb_mm: /* 15 insns */
 	retl
 	wrpr		%g7, 0x0, %pstate
 
-__cheetah_flush_tlb_pending:	/* 23 insns */
+__cheetah_flush_tlb_pending:	/* 26 insns */
 	/* %o0 = context, %o1 = nr, %o2 = vaddrs[] */
 	rdpr		%pstate, %g7
 	sllx		%o1, 3, %o1
@@ -259,6 +260,9 @@ __cheetah_flush_tlb_pending: /* 23 insns */
 	wrpr		%g0, 1, %tl
 	mov		PRIMARY_CONTEXT, %o4
 	ldxa		[%o4] ASI_DMMU, %g2
+	srlx		%g2, CTX_PGSZ1_NUC_SHIFT, %o3
+	sllx		%o3, CTX_PGSZ1_NUC_SHIFT, %o3
+	or		%o0, %o3, %o0	/* Preserve nucleus page size fields */
 	stxa		%o0, [%o4] ASI_DMMU
 1:	sub		%o1, (1 << 3), %o1
 	ldx		[%o2 + %o1], %o3
@@ -311,14 +315,14 @@ cheetah_patch_cachetlbops:
 	sethi		%hi(__cheetah_flush_tlb_mm), %o1
 	or		%o1, %lo(__cheetah_flush_tlb_mm), %o1
 	call		cheetah_patch_one
-	mov		15, %o2
+	mov		18, %o2
 
 	sethi		%hi(__flush_tlb_pending), %o0
 	or		%o0, %lo(__flush_tlb_pending), %o0
 	sethi		%hi(__cheetah_flush_tlb_pending), %o1
 	or		%o1, %lo(__cheetah_flush_tlb_pending), %o1
 	call		cheetah_patch_one
-	mov		23, %o2
+	mov		26, %o2
 
 #ifdef DCACHE_ALIASING_POSSIBLE
 	sethi		%hi(__flush_dcache_page), %o0
@@ -352,9 +356,12 @@ cheetah_patch_cachetlbops:
 	.globl		xcall_flush_tlb_mm
 xcall_flush_tlb_mm:
 	mov		PRIMARY_CONTEXT, %g2
-	mov		0x40, %g4
 	ldxa		[%g2] ASI_DMMU, %g3
+	srlx		%g3, CTX_PGSZ1_NUC_SHIFT, %g4
+	sllx		%g4, CTX_PGSZ1_NUC_SHIFT, %g4
+	or		%g5, %g4, %g5	/* Preserve nucleus page size fields */
 	stxa		%g5, [%g2] ASI_DMMU
+	mov		0x40, %g4
 	stxa		%g0, [%g4] ASI_DMMU_DEMAP
 	stxa		%g0, [%g4] ASI_IMMU_DEMAP
 	stxa		%g3, [%g2] ASI_DMMU
@@ -366,6 +373,10 @@ xcall_flush_tlb_pending:
 	sllx		%g1, 3, %g1
 	mov		PRIMARY_CONTEXT, %g4
 	ldxa		[%g4] ASI_DMMU, %g2
+	srlx		%g2, CTX_PGSZ1_NUC_SHIFT, %g4
+	sllx		%g4, CTX_PGSZ1_NUC_SHIFT, %g4
+	or		%g5, %g4, %g5
+	mov		PRIMARY_CONTEXT, %g4
 	stxa		%g5, [%g4] ASI_DMMU
 1:	sub		%g1, (1 << 3), %g1
 	ldx		[%g7 + %g1], %g5