diff options
author | David S. Miller <davem@davemloft.net> | 2006-01-31 21:34:51 -0500 |
---|---|---|
committer | David S. Miller <davem@sunset.davemloft.net> | 2006-03-20 04:11:29 -0500 |
commit | 86b818687d4894063ecd1190e54717a0cce8c009 (patch) | |
tree | d2951295358502c88f7fe0c02517d729cff4eb9a /arch/sparc64/kernel | |
parent | 9954863975910a1b9372b7d5006a6cba43bdd288 (diff) |
[SPARC64]: Fix race in LOAD_PER_CPU_BASE()
Since we use %g5 itself as a temporary, it can get clobbered
if we take an interrupt mid-stream and thus end up with
the final %g5 value too early as a result of rtrap processing.
Set %g5 at the very end, atomically, to avoid this problem.
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'arch/sparc64/kernel')
-rw-r--r-- | arch/sparc64/kernel/etrap.S | 4 | ||||
-rw-r--r-- | arch/sparc64/kernel/rtrap.S | 2 | ||||
-rw-r--r-- | arch/sparc64/kernel/winfixup.S | 6 |
3 files changed, 6 insertions, 6 deletions
diff --git a/arch/sparc64/kernel/etrap.S b/arch/sparc64/kernel/etrap.S index db7681017299..d974d18b15be 100644 --- a/arch/sparc64/kernel/etrap.S +++ b/arch/sparc64/kernel/etrap.S | |||
@@ -100,7 +100,7 @@ etrap_irq: | |||
100 | stx %i7, [%sp + PTREGS_OFF + PT_V9_I7] | 100 | stx %i7, [%sp + PTREGS_OFF + PT_V9_I7] |
101 | wrpr %g0, ETRAP_PSTATE2, %pstate | 101 | wrpr %g0, ETRAP_PSTATE2, %pstate |
102 | mov %l6, %g6 | 102 | mov %l6, %g6 |
103 | LOAD_PER_CPU_BASE(%g4, %g3) | 103 | LOAD_PER_CPU_BASE(%g4, %g3, %l1) |
104 | jmpl %l2 + 0x4, %g0 | 104 | jmpl %l2 + 0x4, %g0 |
105 | ldx [%g6 + TI_TASK], %g4 | 105 | ldx [%g6 + TI_TASK], %g4 |
106 | 106 | ||
@@ -250,7 +250,7 @@ scetrap: | |||
250 | stx %i6, [%sp + PTREGS_OFF + PT_V9_I6] | 250 | stx %i6, [%sp + PTREGS_OFF + PT_V9_I6] |
251 | mov %l6, %g6 | 251 | mov %l6, %g6 |
252 | stx %i7, [%sp + PTREGS_OFF + PT_V9_I7] | 252 | stx %i7, [%sp + PTREGS_OFF + PT_V9_I7] |
253 | LOAD_PER_CPU_BASE(%g4, %g3) | 253 | LOAD_PER_CPU_BASE(%g4, %g3, %l1) |
254 | ldx [%g6 + TI_TASK], %g4 | 254 | ldx [%g6 + TI_TASK], %g4 |
255 | done | 255 | done |
256 | 256 | ||
diff --git a/arch/sparc64/kernel/rtrap.S b/arch/sparc64/kernel/rtrap.S index 89794ebdcbcf..64bc03610bc6 100644 --- a/arch/sparc64/kernel/rtrap.S +++ b/arch/sparc64/kernel/rtrap.S | |||
@@ -226,7 +226,7 @@ rt_continue: ldx [%sp + PTREGS_OFF + PT_V9_G1], %g1 | |||
226 | brz,pt %l3, 1f | 226 | brz,pt %l3, 1f |
227 | nop | 227 | nop |
228 | /* Must do this before thread reg is clobbered below. */ | 228 | /* Must do this before thread reg is clobbered below. */ |
229 | LOAD_PER_CPU_BASE(%g6, %g7) | 229 | LOAD_PER_CPU_BASE(%i0, %i1, %i2) |
230 | 1: | 230 | 1: |
231 | ldx [%sp + PTREGS_OFF + PT_V9_G6], %g6 | 231 | ldx [%sp + PTREGS_OFF + PT_V9_G6], %g6 |
232 | ldx [%sp + PTREGS_OFF + PT_V9_G7], %g7 | 232 | ldx [%sp + PTREGS_OFF + PT_V9_G7], %g7 |
diff --git a/arch/sparc64/kernel/winfixup.S b/arch/sparc64/kernel/winfixup.S index c0545d089c96..ade991b7d079 100644 --- a/arch/sparc64/kernel/winfixup.S +++ b/arch/sparc64/kernel/winfixup.S | |||
@@ -86,7 +86,7 @@ fill_fixup: | |||
86 | wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate | 86 | wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate |
87 | mov %o7, %g6 | 87 | mov %o7, %g6 |
88 | ldx [%g6 + TI_TASK], %g4 | 88 | ldx [%g6 + TI_TASK], %g4 |
89 | LOAD_PER_CPU_BASE(%g1, %g2) | 89 | LOAD_PER_CPU_BASE(%g1, %g2, %g3) |
90 | 90 | ||
91 | /* This is the same as below, except we handle this a bit special | 91 | /* This is the same as below, except we handle this a bit special |
92 | * since we must preserve %l5 and %l6, see comment above. | 92 | * since we must preserve %l5 and %l6, see comment above. |
@@ -209,7 +209,7 @@ fill_fixup_mna: | |||
209 | wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate | 209 | wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate |
210 | mov %o7, %g6 ! Get current back. | 210 | mov %o7, %g6 ! Get current back. |
211 | ldx [%g6 + TI_TASK], %g4 ! Finish it. | 211 | ldx [%g6 + TI_TASK], %g4 ! Finish it. |
212 | LOAD_PER_CPU_BASE(%g1, %g2) | 212 | LOAD_PER_CPU_BASE(%g1, %g2, %g3) |
213 | call mem_address_unaligned | 213 | call mem_address_unaligned |
214 | add %sp, PTREGS_OFF, %o0 | 214 | add %sp, PTREGS_OFF, %o0 |
215 | 215 | ||
@@ -312,7 +312,7 @@ fill_fixup_dax: | |||
312 | wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate | 312 | wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate |
313 | mov %o7, %g6 ! Get current back. | 313 | mov %o7, %g6 ! Get current back. |
314 | ldx [%g6 + TI_TASK], %g4 ! Finish it. | 314 | ldx [%g6 + TI_TASK], %g4 ! Finish it. |
315 | LOAD_PER_CPU_BASE(%g1, %g2) | 315 | LOAD_PER_CPU_BASE(%g1, %g2, %g3) |
316 | call spitfire_data_access_exception | 316 | call spitfire_data_access_exception |
317 | add %sp, PTREGS_OFF, %o0 | 317 | add %sp, PTREGS_OFF, %o0 |
318 | 318 | ||