 arch/ia64/include/asm/atomic.h | 130 ++++++++++++++++++++++++++++++++-----
 1 file changed, 114 insertions(+), 16 deletions(-)
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index 8dfb5f6f6c35..f565ad376142 100644
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -42,8 +42,27 @@ ia64_atomic_##op (int i, atomic_t *v)				\
 	return new;							\
 }
 
-ATOMIC_OP(add, +)
-ATOMIC_OP(sub, -)
+#define ATOMIC_FETCH_OP(op, c_op)					\
+static __inline__ int							\
+ia64_atomic_fetch_##op (int i, atomic_t *v)				\
+{									\
+	__s32 old, new;							\
+	CMPXCHG_BUGCHECK_DECL						\
+									\
+	do {								\
+		CMPXCHG_BUGCHECK(v);					\
+		old = atomic_read(v);					\
+		new = old c_op i;					\
+	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
+	return old;							\
+}
+
+#define ATOMIC_OPS(op, c_op)						\
+	ATOMIC_OP(op, c_op)						\
+	ATOMIC_FETCH_OP(op, c_op)
+
+ATOMIC_OPS(add, +)
+ATOMIC_OPS(sub, -)
 
 #define atomic_add_return(i,v)						\
 ({									\
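The ATOMIC_FETCH_OP template above is the heart of the patch: unlike ATOMIC_OP, which hands back the new value, the fetch variant retries the compare-and-exchange until no other CPU raced with the update and then returns the value the counter held before the operation. For illustration only, a minimal user-space sketch of the same pattern in C11 (the name fetch_add_cas and the use of <stdatomic.h> are mine; the kernel code uses ia64_cmpxchg with acquire semantics instead):

    #include <stdatomic.h>
    #include <stdio.h>

    /* Illustrative analogue of ia64_atomic_fetch_##op: retry a
     * compare-and-swap until no other thread raced with the update,
     * then return the value the counter held before the operation. */
    static int fetch_add_cas(atomic_int *v, int i)
    {
            int old, new;

            do {
                    old = atomic_load(v);   /* snapshot the current value */
                    new = old + i;          /* compute the updated value  */
            } while (!atomic_compare_exchange_weak(v, &old, new));
            return old;                     /* fetch-ops return the old value */
    }

    int main(void)
    {
            atomic_int v = 40;
            int old = fetch_add_cas(&v, 2);
            printf("old=%d new=%d\n", old, atomic_load(&v)); /* old=40 new=42 */
            return 0;
    }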
@@ -69,14 +88,44 @@ ATOMIC_OP(sub, -)
 	  : ia64_atomic_sub(__ia64_asr_i, v);				\
 })
 
-ATOMIC_OP(and, &)
-ATOMIC_OP(or, |)
-ATOMIC_OP(xor, ^)
+#define atomic_fetch_add(i,v)						\
+({									\
+	int __ia64_aar_i = (i);						\
+	(__builtin_constant_p(i)					\
+	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
+	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
+	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
+	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
+		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
+		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
+})
+
+#define atomic_fetch_sub(i,v)						\
+({									\
+	int __ia64_asr_i = (i);						\
+	(__builtin_constant_p(i)					\
+	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
+	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
+	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
+	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
+		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
+		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
+})
+
+ATOMIC_FETCH_OP(and, &)
+ATOMIC_FETCH_OP(or, |)
+ATOMIC_FETCH_OP(xor, ^)
+
+#define atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
+#define atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
+#define atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)
 
-#define atomic_and(i,v)	(void)ia64_atomic_and(i,v)
-#define atomic_or(i,v)	(void)ia64_atomic_or(i,v)
-#define atomic_xor(i,v)	(void)ia64_atomic_xor(i,v)
+#define atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
+#define atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
+#define atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)
 
+#undef ATOMIC_OPS
+#undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP
 
 #define ATOMIC64_OP(op, c_op)						\
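Two things happen in this hunk. First, atomic_fetch_add/atomic_fetch_sub keep the existing fast path: ia64's fetchadd instruction only encodes the immediates +-1, +-4, +-8 and +-16, so when __builtin_constant_p(i) shows the operand is a compile-time constant in that set, a single fetchadd is emitted (subtraction simply negates the constant); anything else falls back to the ia64_atomic_fetch_* cmpxchg loop. Second, the old ia64_atomic_{and,or,xor} helpers go away: plain atomic_and/or/xor become the fetch variants with the result cast to (void), so one loop body serves both forms. A sketch of the constant-dispatch idiom, with hypothetical add_fast()/add_slow() standing in for the two paths:

    #include <stdio.h>

    static int add_fast(int x) { return x + 1; }  /* stands in for fetchadd     */
    static int add_slow(int x) { return x + 1; }  /* stands in for cmpxchg loop */

    /* __builtin_constant_p(x) is a GCC/Clang builtin: it folds to 1 when x
     * is a compile-time constant, so the macro picks a path at build time. */
    #define add_dispatch(x) \
            (__builtin_constant_p(x) ? add_fast(x) : add_slow(x))

    int main(void)
    {
            int n = 3;
            printf("%d %d\n", add_dispatch(2), add_dispatch(n));
            return 0;
    }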
@@ -94,8 +143,27 @@ ia64_atomic64_##op (__s64 i, atomic64_t *v)			\
 	return new;							\
 }
 
-ATOMIC64_OP(add, +)
-ATOMIC64_OP(sub, -)
+#define ATOMIC64_FETCH_OP(op, c_op)					\
+static __inline__ long							\
+ia64_atomic64_fetch_##op (__s64 i, atomic64_t *v)			\
+{									\
+	__s64 old, new;							\
+	CMPXCHG_BUGCHECK_DECL						\
+									\
+	do {								\
+		CMPXCHG_BUGCHECK(v);					\
+		old = atomic64_read(v);					\
+		new = old c_op i;					\
+	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
+	return old;							\
+}
+
+#define ATOMIC64_OPS(op, c_op)						\
+	ATOMIC64_OP(op, c_op)						\
+	ATOMIC64_FETCH_OP(op, c_op)
+
+ATOMIC64_OPS(add, +)
+ATOMIC64_OPS(sub, -)
 
 #define atomic64_add_return(i,v)					\
 ({									\
@@ -121,14 +189,44 @@ ATOMIC64_OP(sub, -)
 	  : ia64_atomic64_sub(__ia64_asr_i, v);				\
 })
 
-ATOMIC64_OP(and, &)
-ATOMIC64_OP(or, |)
-ATOMIC64_OP(xor, ^)
+#define atomic64_fetch_add(i,v)						\
+({									\
+	long __ia64_aar_i = (i);					\
+	(__builtin_constant_p(i)					\
+	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
+	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
+	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
+	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
+		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
+		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
+})
+
+#define atomic64_fetch_sub(i,v)						\
+({									\
+	long __ia64_asr_i = (i);					\
+	(__builtin_constant_p(i)					\
+	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
+	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
+	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
+	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
+		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
+		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
+})
+
+ATOMIC64_FETCH_OP(and, &)
+ATOMIC64_FETCH_OP(or, |)
+ATOMIC64_FETCH_OP(xor, ^)
+
+#define atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
+#define atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
+#define atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)
 
-#define atomic64_and(i,v)	(void)ia64_atomic64_and(i,v)
-#define atomic64_or(i,v)	(void)ia64_atomic64_or(i,v)
-#define atomic64_xor(i,v)	(void)ia64_atomic64_xor(i,v)
+#define atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
+#define atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
+#define atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)
 
+#undef ATOMIC64_OPS
+#undef ATOMIC64_FETCH_OP
 #undef ATOMIC64_OP
 
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
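The atomic64_* hunks mirror the 32-bit ones exactly, widening the types to __s64/long and letting sizeof(atomic64_t) steer ia64_cmpxchg to the 8-byte compare-and-exchange. The ATOMIC_OPS/ATOMIC64_OPS wrappers exist only so that add and sub stamp out both the returning and the fetching helper from one line. A toy, non-atomic illustration of that token-pasting pattern (all demo_* names are hypothetical; the real macros wrap cmpxchg loops):

    #include <stdio.h>

    /* Non-atomic stand-ins: the point here is the ##-pasting. */
    #define DEMO_OP(op, c_op) \
            static int demo_##op(int i, int *v) { return *v = *v c_op i; }

    #define DEMO_FETCH_OP(op, c_op) \
            static int demo_fetch_##op(int i, int *v) \
            { int old = *v; *v = old c_op i; return old; }

    #define DEMO_OPS(op, c_op) DEMO_OP(op, c_op) DEMO_FETCH_OP(op, c_op)

    DEMO_OPS(add, +)        /* defines demo_add() and demo_fetch_add() */
    DEMO_OPS(sub, -)        /* defines demo_sub() and demo_fetch_sub() */

    int main(void)
    {
            int v = 10;
            printf("%d\n", demo_fetch_add(5, &v)); /* 10: the old value */
            printf("%d\n", demo_add(5, &v));       /* 20: the new value */
            return 0;
    }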