Diffstat (limited to 'arch/sparc64/lib/atomic.S')
 arch/sparc64/lib/atomic.S | 42 ++++++++++++++++++++++++++----------------
 1 file changed, 26 insertions(+), 16 deletions(-)
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index e528b8d1a3e6..faf87c31598b 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -7,18 +7,6 @@
 #include <linux/config.h>
 #include <asm/asi.h>
 
-	/* On SMP we need to use memory barriers to ensure
-	 * correct memory operation ordering, nop these out
-	 * for uniprocessor.
-	 */
-#ifdef CONFIG_SMP
-#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad
-#define ATOMIC_POST_BARRIER	membar #StoreLoad | #StoreStore
-#else
-#define ATOMIC_PRE_BARRIER	nop
-#define ATOMIC_POST_BARRIER	nop
-#endif
-
 	.text
 
 	/* Two versions of the atomic routines, one that
@@ -52,6 +40,24 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	 nop
 	.size	atomic_sub, .-atomic_sub
 
+	/* On SMP we need to use memory barriers to ensure
+	 * correct memory operation ordering, nop these out
+	 * for uniprocessor.
+	 */
+#ifdef CONFIG_SMP
+
+#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;
+#define ATOMIC_POST_BARRIER	\
+	ba,pt	%xcc, 80b;	\
+	membar	#StoreLoad | #StoreStore
+
+80:	retl
+	 nop
+#else
+#define ATOMIC_PRE_BARRIER
+#define ATOMIC_POST_BARRIER
+#endif
+
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
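
The hunk above does two things: it parks the barrier macros after atomic_add/atomic_sub (which take no barriers), next to a new shared return stub `80: retl; nop`, and it turns ATOMIC_POST_BARRIER into a branch whose delay slot carries the membar. Each *_ret routine now returns through the stub, so the membar never sits in a retl delay slot; the stub has to be emitted before its users for the backward reference `80b` to resolve. A hand expansion of the tail of atomic_add_ret under CONFIG_SMP (a sketch for orientation, not assembler output):

	sra	%g7, 0, %o0			! return value fixed up first
	ba,pt	%xcc, 80b			! ATOMIC_POST_BARRIER: branch to the stub,
	 membar	#StoreLoad | #StoreStore	!  membar rides in ba's delay slot
	retl					! unreachable on SMP (ba is always taken);
	 nop					!  this pair is the live return on UP

On SMP the actual return is the shared `80: retl; nop` added above.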
@@ -62,9 +68,10 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%icc, 1b
 	 add	%g7, %o0, %g7
+	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 sra	%g7, 0, %o0
+	 nop
 	.size	atomic_add_ret, .-atomic_add_ret
 
 	.globl	atomic_sub_ret
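
Note the ordering change in this hunk and the three that follow: the return-value fixup (`sra %g7, 0, %o0`, which sign-extends the 32-bit result into the 64-bit register) moves up in front of ATOMIC_POST_BARRIER. That is forced, not cosmetic: on SMP the macro branches straight to the shared stub, so an instruction left in the function's own retl delay slot would never run. The ordering being avoided, sketched by hand:

	ba,pt	%xcc, 80b			! always taken on SMP ...
	 membar	#StoreLoad | #StoreStore
	retl					! ... so this retl is skipped and the
	 sra	%g7, 0, %o0			!  caller would see a stale %o0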
@@ -77,9 +84,10 @@ atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%icc, 1b
 	 sub	%g7, %o0, %g7
+	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 sra	%g7, 0, %o0
+	 nop
 	.size	atomic_sub_ret, .-atomic_sub_ret
 
 	.globl	atomic64_add
@@ -118,9 +126,10 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%xcc, 1b
 	 add	%g7, %o0, %g7
+	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 mov	%g7, %o0
+	 nop
 	.size	atomic64_add_ret, .-atomic64_add_ret
 
 	.globl	atomic64_sub_ret
@@ -133,7 +142,8 @@ atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%xcc, 1b
 	 sub	%g7, %o0, %g7
+	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 mov	%g7, %o0
+	 nop
 	.size	atomic64_sub_ret, .-atomic64_sub_ret
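
The atomic64_* hunks repeat the same reordering with `mov %g7, %o0` in place of `sra`, since the 64-bit result needs no sign extension. On uniprocessor builds both macros now expand to nothing rather than nop, so no barrier placeholders are assembled and each *_ret routine simply ends with (UP tail, macros expanded by hand):

	sra	%g7, 0, %o0		! mov %g7, %o0 in the atomic64_* variants
	retl
	 nop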