author		Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>	2007-05-08 03:34:18 -0400
committer	Linus Torvalds <torvalds@woody.linux-foundation.org>	2007-05-08 14:15:19 -0400
commit		e96e69942312314c061eb2fdd947a7a1211d62f8 (patch)
tree		179d1e968a5e55e4a8bb2f5d2c53fe0781781640
parent		bf8f6e5b3e51ee0c64c2d1350c70198ddc8ad3f7 (diff)
atomic.h: add atomic64 cmpxchg, xchg and add_unless to alpha
This series mainly adds support for the missing 64-bit cmpxchg and 64-bit
atomic add_unless operations, so it principally targets 64-bit
architectures. It also adds the complete list of atomic operations on the
atomic_long type.

This patch:

atomic.h: add atomic64 cmpxchg, xchg and add_unless to alpha

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Richard Henderson <rth@twiddle.net>
Cc: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
-rw-r--r--	include/asm-alpha/atomic.h	49
1 file changed, 46 insertions(+), 3 deletions(-)
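Before the diff itself, a quick illustration of the primitives involved. The
sketch below is a hypothetical userspace model of the cmpxchg/xchg semantics
the patch exposes for atomic64_t, written against C11 <stdatomic.h> rather
than the kernel's arch-specific primitives; model_cmpxchg, model_xchg, and
counter are illustration names only, not kernel API.

	/*
	 * Illustration only: cmpxchg stores the new value only when the
	 * current value matches the caller's expected value, and in either
	 * case returns what it found; xchg swaps unconditionally.
	 */
	#include <stdatomic.h>
	#include <stdio.h>

	static atomic_llong counter = 42;

	static long long model_cmpxchg(atomic_llong *v, long long old, long long new)
	{
		/* On failure, C11 writes the observed value back into old. */
		atomic_compare_exchange_strong(v, &old, new);
		return old;
	}

	static long long model_xchg(atomic_llong *v, long long new)
	{
		return atomic_exchange(v, new);
	}

	int main(void)
	{
		/* Succeeds: counter was 42, becomes 100; returns 42. */
		printf("cmpxchg 42->100: returned %lld\n", model_cmpxchg(&counter, 42, 100));
		/* Fails: counter is 100, not 42; left unchanged; returns 100. */
		printf("cmpxchg 42->7:   returned %lld\n", model_cmpxchg(&counter, 42, 7));
		/* Always swaps: counter becomes 7; returns 100. */
		printf("xchg ->7:        returned %lld\n", model_xchg(&counter, 7));
		return 0;
	}

The "store only if unchanged, return what was found" property is exactly what
the add_unless loops in the diff below rely on.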
diff --git a/include/asm-alpha/atomic.h b/include/asm-alpha/atomic.h
index fc77f7413083..7b4fba88cbeb 100644
--- a/include/asm-alpha/atomic.h
+++ b/include/asm-alpha/atomic.h
@@ -175,19 +175,62 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 	return result;
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
 #define atomic_add_unless(v, a, u)				\
 ({								\
-	int c, old;						\
+	__typeof__((v)->counter) c, old;			\
 	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+	for (;;) {						\
+		if (unlikely(c == (u)))				\
+			break;					\
+		old = atomic_cmpxchg((v), c, c + (a));		\
+		if (likely(old == c))				\
+			break;					\
 		c = old;					\
+	}							\
 	c != (u);						\
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic64_add_unless(v, a, u)				\
+({								\
+	__typeof__((v)->counter) c, old;			\
+	c = atomic64_read(v);					\
+	for (;;) {						\
+		if (unlikely(c == (u)))				\
+			break;					\
+		old = atomic64_cmpxchg((v), c, c + (a));	\
+		if (likely(old == c))				\
+			break;					\
+		c = old;					\
+	}							\
+	c != (u);						\
+})
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 
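The add_unless loop is easiest to see in isolation. Below is a minimal,
self-contained C11 sketch of the same algorithm plus the inc_not_zero idiom
built on it; model_add_unless and model_inc_not_zero are hypothetical names,
and atomic_compare_exchange_weak stands in for the kernel's cmpxchg, so this
is a model of the logic rather than the kernel macro itself.

	#include <stdatomic.h>
	#include <stdio.h>

	/* Add a to *v unless *v equals u; non-zero means the add happened. */
	static int model_add_unless(atomic_llong *v, long long a, long long u)
	{
		long long c = atomic_load(v);

		for (;;) {
			if (c == u)		/* forbidden value: do nothing */
				break;
			/* Try to move c -> c + a; on failure c is refreshed
			 * with the value some other thread installed. */
			if (atomic_compare_exchange_weak(v, &c, c + a))
				break;		/* cmpxchg saw c: we won */
		}
		return c != u;
	}

	/* The classic "take a reference unless it already hit zero". */
	static int model_inc_not_zero(atomic_llong *v)
	{
		return model_add_unless(v, 1, 0);
	}

	int main(void)
	{
		atomic_llong ref = 1;
		int got;

		got = model_inc_not_zero(&ref);	/* succeeds: 1 -> 2 */
		printf("inc_not_zero -> %d, ref = %lld\n", got, atomic_load(&ref));

		atomic_store(&ref, 0);
		got = model_inc_not_zero(&ref);	/* refuses: ref already 0 */
		printf("inc_not_zero -> %d, ref = %lld\n", got, atomic_load(&ref));
		return 0;
	}

Two details of the patch are worth noting. Declaring c and old as
__typeof__((v)->counter) keeps the same macro body width-correct for both the
int-sized atomic_t and the long-sized atomic64_t, and unrolling the old
single-line while condition into a for (;;) with unlikely()/likely() hints
lets the compiler lay out the common path (value not equal to @u, cmpxchg
succeeds on the first try) as straight-line code.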