Diffstat (limited to 'include/asm-alpha')
-rw-r--r--	include/asm-alpha/atomic.h	59
1 file changed, 31 insertions(+), 28 deletions(-)
diff --git a/include/asm-alpha/atomic.h b/include/asm-alpha/atomic.h
index 7b4fba88cbe..f5cb7b878af 100644
--- a/include/asm-alpha/atomic.h
+++ b/include/asm-alpha/atomic.h
@@ -2,6 +2,7 @@
 #define _ALPHA_ATOMIC_H
 
 #include <asm/barrier.h>
+#include <asm/system.h>
 
 /*
  * Atomic operations that C can't guarantee us. Useful for
@@ -190,20 +191,21 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)			\
-({							\
-	__typeof__((v)->counter) c, old;		\
-	c = atomic_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 /**
@@ -215,20 +217,21 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic64_add_unless(v, a, u)			\
-({							\
-	__typeof__((v)->counter) c, old;		\
-	c = atomic64_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic64_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
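
Note: the change above turns two statement-expression macros into real inline
functions, so the arguments are type-checked and evaluated exactly once. The
sketch below is a minimal userspace illustration of the same compare-and-swap
retry loop; it is not kernel code. C11 <stdatomic.h> stands in for the
kernel's atomic_cmpxchg(), and my_atomic_t / my_add_unless are hypothetical
names invented for this example.

/* Userspace sketch of the cmpxchg retry loop behind atomic_add_unless().
 * C11 atomics replace the kernel primitives; assumptions as noted above.
 */
#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } my_atomic_t;	/* stand-in for atomic_t */

static int my_add_unless(my_atomic_t *v, int a, int u)
{
	int c = atomic_load(&v->counter);

	for (;;) {
		if (c == u)	/* counter holds the forbidden value: give up */
			break;
		/* Try to publish c + a; on failure, c is reloaded with the
		 * value another thread installed, and the loop retries.
		 */
		if (atomic_compare_exchange_strong(&v->counter, &c, c + a))
			break;
	}
	return c != u;	/* non-zero iff the add was performed */
}

int main(void)
{
	my_atomic_t v = { 1 }, z = { 0 };

	printf("%d\n", my_add_unless(&v, 1, 0));	/* 1: v was not 0, add done */
	printf("%d\n", atomic_load(&v.counter));	/* 2 */
	printf("%d\n", my_add_unless(&z, 1, 0));	/* 0: z was 0, left untouched */
	return 0;
}

The same pattern backs atomic_inc_not_zero(), which callers typically use to
take a reference only while the count is still live (non-zero).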