author		David S. Miller <davem@sunset.davemloft.net>	2006-02-03 00:55:10 -0500
committer	David S. Miller <davem@sunset.davemloft.net>	2006-03-20 04:11:35 -0500
commit		ffe483d55229fadbaf4cc7316d47024a24ecd1a2 (patch)
tree		70bdb6c94d5b3512a7b2a3ff06979ac2e4e869bf
parent		92704a1c63c3b481870d02636d0b5a70c7e21cd1 (diff)
[SPARC64]: Add explicit register args to trap state loading macros.

This, as well as making the code cleaner, allows a simplification in
the TSB miss handling path.

Signed-off-by: David S. Miller <davem@davemloft.net>
-rw-r--r--	arch/sparc64/kernel/entry.S	8
-rw-r--r--	arch/sparc64/kernel/etrap.S	10
-rw-r--r--	arch/sparc64/kernel/rtrap.S	2
-rw-r--r--	arch/sparc64/kernel/tsb.S	9
-rw-r--r--	arch/sparc64/kernel/winfixup.S	18
-rw-r--r--	include/asm-sparc64/cpudata.h	88
6 files changed, 64 insertions, 71 deletions
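
For context, the sketch below shows roughly what one of the new macro forms
expands to once explicit register arguments are supplied. It is an illustrative
expansion based on the SMP definitions in the cpudata.h hunk at the end of this
patch, not a verbatim excerpt from the tree; the point of the change is that
each call site now names the destination and temporary registers itself instead
of the macro hard-coding %g6 and %g1:

	/* Illustrative expansion of TRAP_LOAD_THREAD_REG(%g6, %g1),
	 * following the SMP definition in cpudata.h below.  __GET_CPUID
	 * is the existing boot-patched cpuid read, left unexpanded here.
	 */
	__GET_CPUID(%g1)			! cpu id -> %g1 (the TMP argument)
	sethi	%hi(trap_block), %g6		! &trap_block -> %g6 (the DEST argument)
	sllx	%g1, TRAP_BLOCK_SZ_SHIFT, %g1	! byte offset of this cpu's trap_block entry
	or	%g6, %lo(trap_block), %g6
	ldx	[%g6 + %g1], %g6		! current thread info pointer -> %g6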
diff --git a/arch/sparc64/kernel/entry.S b/arch/sparc64/kernel/entry.S
index b3511ff5d04a..4ca3ea0beaf9 100644
--- a/arch/sparc64/kernel/entry.S
+++ b/arch/sparc64/kernel/entry.S
@@ -50,7 +50,7 @@ do_fpdis:
 	add		%g0, %g0, %g0
 	ba,a,pt		%xcc, rtrap_clr_l6
 
-1:	TRAP_LOAD_THREAD_REG
+1:	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	ldub		[%g6 + TI_FPSAVED], %g5
 	wr		%g0, FPRS_FEF, %fprs
 	andcc		%g5, FPRS_FEF, %g0
@@ -190,7 +190,7 @@ fp_other_bounce:
 	.globl		do_fpother_check_fitos
 	.align		32
 do_fpother_check_fitos:
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	sethi		%hi(fp_other_bounce - 4), %g7
 	or		%g7, %lo(fp_other_bounce - 4), %g7
 
@@ -378,7 +378,7 @@ do_ivec:
 	sllx		%g2, %g4, %g2
 	sllx		%g4, 2, %g4
 
-	TRAP_LOAD_IRQ_WORK
+	TRAP_LOAD_IRQ_WORK(%g6, %g1)
 
 	lduw		[%g6 + %g4], %g5	/* g5 = irq_work(cpu, pil) */
 	stw		%g5, [%g3 + 0x00]	/* bucket->irq_chain = g5 */
@@ -422,7 +422,7 @@ setcc:
 
 	.globl		utrap_trap
 utrap_trap:	/* %g3=handler,%g4=level */
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	ldx		[%g6 + TI_UTRAPS], %g1
 	brnz,pt		%g1, invoke_utrap
 	 nop
diff --git a/arch/sparc64/kernel/etrap.S b/arch/sparc64/kernel/etrap.S
index d974d18b15be..b5f6bc52d917 100644
--- a/arch/sparc64/kernel/etrap.S
+++ b/arch/sparc64/kernel/etrap.S
@@ -31,7 +31,7 @@
 		.globl	etrap, etrap_irq, etraptl1
 etrap:		rdpr	%pil, %g2
 etrap_irq:
-		TRAP_LOAD_THREAD_REG
+		TRAP_LOAD_THREAD_REG(%g6, %g1)
 		rdpr	%tstate, %g1
 		sllx	%g2, 20, %g3
 		andcc	%g1, TSTATE_PRIV, %g0
@@ -100,7 +100,7 @@ etrap_irq:
 		stx	%i7, [%sp + PTREGS_OFF + PT_V9_I7]
 		wrpr	%g0, ETRAP_PSTATE2, %pstate
 		mov	%l6, %g6
-		LOAD_PER_CPU_BASE(%g4, %g3, %l1)
+		LOAD_PER_CPU_BASE(%g5, %g6, %g4, %g3, %l1)
 		jmpl	%l2 + 0x4, %g0
 		 ldx	[%g6 + TI_TASK], %g4
 
@@ -124,7 +124,7 @@ etraptl1: /* Save tstate/tpc/tnpc of TL 1-->4 and the tl register itself.
 		 * 0x58	TL4's TT
 		 * 0x60	TL
 		 */
-		TRAP_LOAD_THREAD_REG
+		TRAP_LOAD_THREAD_REG(%g6, %g1)
 		sub	%sp, ((4 * 8) * 4) + 8, %g2
 		rdpr	%tl, %g1
 
@@ -179,7 +179,7 @@ etraptl1: /* Save tstate/tpc/tnpc of TL 1-->4 and the tl register itself.
 		.align	64
 		.globl	scetrap
 scetrap:
-		TRAP_LOAD_THREAD_REG
+		TRAP_LOAD_THREAD_REG(%g6, %g1)
 		rdpr	%pil, %g2
 		rdpr	%tstate, %g1
 		sllx	%g2, 20, %g3
@@ -250,7 +250,7 @@ scetrap:
 		stx	%i6, [%sp + PTREGS_OFF + PT_V9_I6]
 		mov	%l6, %g6
 		stx	%i7, [%sp + PTREGS_OFF + PT_V9_I7]
-		LOAD_PER_CPU_BASE(%g4, %g3, %l1)
+		LOAD_PER_CPU_BASE(%g5, %g6, %g4, %g3, %l1)
 		ldx	[%g6 + TI_TASK], %g4
 		done
 
diff --git a/arch/sparc64/kernel/rtrap.S b/arch/sparc64/kernel/rtrap.S
index 64bc03610bc6..61bd45e7697e 100644
--- a/arch/sparc64/kernel/rtrap.S
+++ b/arch/sparc64/kernel/rtrap.S
@@ -226,7 +226,7 @@ rt_continue:	ldx	[%sp + PTREGS_OFF + PT_V9_G1], %g1
 		brz,pt	%l3, 1f
 		 nop
 		/* Must do this before thread reg is clobbered below.  */
-		LOAD_PER_CPU_BASE(%i0, %i1, %i2)
+		LOAD_PER_CPU_BASE(%g5, %g6, %i0, %i1, %i2)
 1:
 		ldx	[%sp + PTREGS_OFF + PT_V9_G6], %g6
 		ldx	[%sp + PTREGS_OFF + PT_V9_G7], %g7
diff --git a/arch/sparc64/kernel/tsb.S b/arch/sparc64/kernel/tsb.S
index ff6a79beb98d..28e38b168dda 100644
--- a/arch/sparc64/kernel/tsb.S
+++ b/arch/sparc64/kernel/tsb.S
@@ -36,14 +36,7 @@ tsb_miss_itlb:
 	 nop
 
 tsb_miss_page_table_walk:
-	/* This clobbers %g1 and %g6, preserve them... */
-	mov		%g1, %g5
-	mov		%g6, %g2
-
-	TRAP_LOAD_PGD_PHYS
-
-	mov		%g2, %g6
-	mov		%g5, %g1
+	TRAP_LOAD_PGD_PHYS(%g7, %g5)
 
 	USER_PGTABLE_WALK_TL1(%g4, %g7, %g5, %g2, tsb_do_fault)
 
diff --git a/arch/sparc64/kernel/winfixup.S b/arch/sparc64/kernel/winfixup.S
index 320a762d0519..211021ae6e8a 100644
--- a/arch/sparc64/kernel/winfixup.S
+++ b/arch/sparc64/kernel/winfixup.S
@@ -40,7 +40,7 @@ set_pcontext:
 	 */
 	.globl	fill_fixup, spill_fixup
 fill_fixup:
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	rdpr	%tstate, %g1
 	andcc	%g1, TSTATE_PRIV, %g0
 	or	%g4, FAULT_CODE_WINFIXUP, %g4
@@ -86,7 +86,7 @@ fill_fixup:
 	wrpr	%l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
 	mov	%o7, %g6
 	ldx	[%g6 + TI_TASK], %g4
-	LOAD_PER_CPU_BASE(%g1, %g2, %g3)
+	LOAD_PER_CPU_BASE(%g5, %g6, %g1, %g2, %g3)
 
 	/* This is the same as below, except we handle this a bit special
 	 * since we must preserve %l5 and %l6, see comment above.
@@ -105,7 +105,7 @@ fill_fixup:
 	 * do not touch %g7 or %g2 so we handle the two cases fine.
 	 */
 spill_fixup:
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	ldx	[%g6 + TI_FLAGS], %g1
 	andcc	%g1, _TIF_32BIT, %g0
 	ldub	[%g6 + TI_WSAVED], %g1
@@ -181,7 +181,7 @@ winfix_mna:
 	wrpr	%g3, %tnpc
 	done
 fill_fixup_mna:
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	rdpr	%tstate, %g1
 	andcc	%g1, TSTATE_PRIV, %g0
 	be,pt	%xcc, window_mna_from_user_common
@@ -209,14 +209,14 @@ fill_fixup_mna:
 	wrpr	%l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
 	mov	%o7, %g6			! Get current back.
 	ldx	[%g6 + TI_TASK], %g4		! Finish it.
-	LOAD_PER_CPU_BASE(%g1, %g2, %g3)
+	LOAD_PER_CPU_BASE(%g5, %g6, %g1, %g2, %g3)
 	call	mem_address_unaligned
 	 add	%sp, PTREGS_OFF, %o0
 
 	b,pt	%xcc, rtrap
 	 nop				! yes, the nop is correct
 spill_fixup_mna:
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	ldx	[%g6 + TI_FLAGS], %g1
 	andcc	%g1, _TIF_32BIT, %g0
 	ldub	[%g6 + TI_WSAVED], %g1
@@ -284,7 +284,7 @@ winfix_dax:
 	wrpr	%g3, %tnpc
 	done
 fill_fixup_dax:
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	rdpr	%tstate, %g1
 	andcc	%g1, TSTATE_PRIV, %g0
 	be,pt	%xcc, window_dax_from_user_common
@@ -312,14 +312,14 @@ fill_fixup_dax:
 	wrpr	%l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
 	mov	%o7, %g6			! Get current back.
 	ldx	[%g6 + TI_TASK], %g4		! Finish it.
-	LOAD_PER_CPU_BASE(%g1, %g2, %g3)
+	LOAD_PER_CPU_BASE(%g5, %g6, %g1, %g2, %g3)
 	call	spitfire_data_access_exception
 	 add	%sp, PTREGS_OFF, %o0
 
 	b,pt	%xcc, rtrap
 	 nop				! yes, the nop is correct
 spill_fixup_dax:
-	TRAP_LOAD_THREAD_REG
+	TRAP_LOAD_THREAD_REG(%g6, %g1)
 	ldx	[%g6 + TI_FLAGS], %g1
 	andcc	%g1, _TIF_32BIT, %g0
 	ldub	[%g6 + TI_WSAVED], %g1
diff --git a/include/asm-sparc64/cpudata.h b/include/asm-sparc64/cpudata.h
index da54b4f35403..c15514f82c33 100644
--- a/include/asm-sparc64/cpudata.h
+++ b/include/asm-sparc64/cpudata.h
@@ -107,67 +107,67 @@ extern struct cpuid_patch_entry __cpuid_patch, __cpuid_patch_end;
 	lduwa		[REG] ASI_PHYS_BYPASS_EC_E, REG;\
 	.previous;
 
-/* Clobbers %g1, current address space PGD phys address into %g7.  */
-#define TRAP_LOAD_PGD_PHYS			\
-	__GET_CPUID(%g1)			\
-	sethi	%hi(trap_block), %g7;		\
-	sllx	%g1, TRAP_BLOCK_SZ_SHIFT, %g1;	\
-	or	%g7, %lo(trap_block), %g7;	\
-	add	%g7, %g1, %g7;			\
-	ldx	[%g7 + TRAP_PER_CPU_PGD_PADDR], %g7;
+/* Clobbers TMP, current address space PGD phys address into DEST.  */
+#define TRAP_LOAD_PGD_PHYS(DEST, TMP)		\
+	__GET_CPUID(TMP)			\
+	sethi	%hi(trap_block), DEST;		\
+	sllx	TMP, TRAP_BLOCK_SZ_SHIFT, TMP;	\
+	or	DEST, %lo(trap_block), DEST;	\
+	add	DEST, TMP, DEST;		\
+	ldx	[DEST + TRAP_PER_CPU_PGD_PADDR], DEST;
 
-/* Clobbers %g1, loads local processor's IRQ work area into %g6.  */
-#define TRAP_LOAD_IRQ_WORK			\
-	__GET_CPUID(%g1)			\
-	sethi	%hi(__irq_work), %g6;		\
-	sllx	%g1, 6, %g1;			\
-	or	%g6, %lo(__irq_work), %g6;	\
-	add	%g6, %g1, %g6;
+/* Clobbers TMP, loads local processor's IRQ work area into DEST.  */
+#define TRAP_LOAD_IRQ_WORK(DEST, TMP)		\
+	__GET_CPUID(TMP)			\
+	sethi	%hi(__irq_work), DEST;		\
+	sllx	TMP, 6, TMP;			\
+	or	DEST, %lo(__irq_work), DEST;	\
+	add	DEST, TMP, DEST;
 
-/* Clobbers %g1, loads %g6 with current thread info pointer.  */
-#define TRAP_LOAD_THREAD_REG			\
-	__GET_CPUID(%g1)			\
-	sethi	%hi(trap_block), %g6;		\
-	sllx	%g1, TRAP_BLOCK_SZ_SHIFT, %g1;	\
-	or	%g6, %lo(trap_block), %g6;	\
-	ldx	[%g6 + %g1], %g6;
+/* Clobbers TMP, loads DEST with current thread info pointer.  */
+#define TRAP_LOAD_THREAD_REG(DEST, TMP)		\
+	__GET_CPUID(TMP)			\
+	sethi	%hi(trap_block), DEST;		\
+	sllx	TMP, TRAP_BLOCK_SZ_SHIFT, TMP;	\
+	or	DEST, %lo(trap_block), DEST;	\
+	ldx	[DEST + TMP], DEST;
 
-/* Given the current thread info pointer in %g6, load the per-cpu
- * area base of the current processor into %g5.  REG1, REG2, and REG3 are
+/* Given the current thread info pointer in THR, load the per-cpu
+ * area base of the current processor into DEST.  REG1, REG2, and REG3 are
  * clobbered.
  *
- * You absolutely cannot use %g5 as a temporary in this code.  The
+ * You absolutely cannot use DEST as a temporary in this code.  The
  * reason is that traps can happen during execution, and return from
- * trap will load the fully resolved %g5 per-cpu base.  This can corrupt
+ * trap will load the fully resolved DEST per-cpu base.  This can corrupt
  * the calculations done by the macro mid-stream.
  */
-#define LOAD_PER_CPU_BASE(REG1, REG2, REG3)		\
-	ldub	[%g6 + TI_CPU], REG1;			\
+#define LOAD_PER_CPU_BASE(DEST, THR, REG1, REG2, REG3)	\
+	ldub	[THR + TI_CPU], REG1;			\
 	sethi	%hi(__per_cpu_shift), REG3;		\
 	sethi	%hi(__per_cpu_base), REG2;		\
 	ldx	[REG3 + %lo(__per_cpu_shift)], REG3;	\
 	ldx	[REG2 + %lo(__per_cpu_base)], REG2;	\
 	sllx	REG1, REG3, REG3;			\
-	add	REG3, REG2, %g5;
+	add	REG3, REG2, DEST;
 
 #else
 
 /* Uniprocessor versions, we know the cpuid is zero.  */
-#define TRAP_LOAD_PGD_PHYS			\
-	sethi	%hi(trap_block), %g7;		\
-	or	%g7, %lo(trap_block), %g7;	\
-	ldx	[%g7 + TRAP_PER_CPU_PGD_PADDR], %g7;
+#define TRAP_LOAD_PGD_PHYS(DEST, TMP)		\
+	sethi	%hi(trap_block), DEST;		\
+	or	DEST, %lo(trap_block), DEST;	\
+	ldx	[DEST + TRAP_PER_CPU_PGD_PADDR], DEST;
 
-#define TRAP_LOAD_IRQ_WORK			\
-	sethi	%hi(__irq_work), %g6;		\
-	or	%g6, %lo(__irq_work), %g6;
+#define TRAP_LOAD_IRQ_WORK(DEST, TMP)		\
+	sethi	%hi(__irq_work), DEST;		\
+	or	DEST, %lo(__irq_work), DEST;
 
-#define TRAP_LOAD_THREAD_REG			\
-	sethi	%hi(trap_block), %g6;		\
-	ldx	[%g6 + %lo(trap_block)], %g6;
+#define TRAP_LOAD_THREAD_REG(DEST, TMP)		\
+	sethi	%hi(trap_block), DEST;		\
+	ldx	[DEST + %lo(trap_block)], DEST;
 
-/* No per-cpu areas on uniprocessor, so no need to load %g5.  */
-#define LOAD_PER_CPU_BASE(REG1, REG2, REG3)
+/* No per-cpu areas on uniprocessor, so no need to load DEST.  */
+#define LOAD_PER_CPU_BASE(DEST, THR, REG1, REG2, REG3)
 
 #endif /* !(CONFIG_SMP) */
 
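
The new LOAD_PER_CPU_BASE signature likewise makes the destination and the
thread pointer explicit. As an illustrative expansion derived from the SMP
definition above (not copied from the tree), the rtrap.S call
LOAD_PER_CPU_BASE(%g5, %g6, %i0, %i1, %i2) becomes roughly:

	ldub	[%g6 + TI_CPU], %i0			! cpu number from thread_info (THR = %g6)
	sethi	%hi(__per_cpu_shift), %i2
	sethi	%hi(__per_cpu_base), %i1
	ldx	[%i2 + %lo(__per_cpu_shift)], %i2	! per-cpu area shift
	ldx	[%i1 + %lo(__per_cpu_base)], %i1	! per-cpu area base
	sllx	%i0, %i2, %i2				! this cpu's offset
	add	%i2, %i1, %g5				! per-cpu base -> %g5 (DEST)

As the macro's comment warns, DEST (%g5 in this call) must never be used as a
temporary here, since a nested trap return reloads the fully resolved per-cpu
base into it mid-sequence.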