Diffstat (limited to 'include/asm-alpha/atomic.h')
-rw-r--r--  include/asm-alpha/atomic.h | 49 ++++++++++++++++++++++++++++++++++++++++++++++---
1 file changed, 46 insertions(+), 3 deletions(-)
diff --git a/include/asm-alpha/atomic.h b/include/asm-alpha/atomic.h
index fc77f7413083..7b4fba88cbeb 100644
--- a/include/asm-alpha/atomic.h
+++ b/include/asm-alpha/atomic.h
@@ -175,19 +175,62 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 	return result;
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
 #define atomic_add_unless(v, a, u)				\
 ({								\
-	int c, old;						\
+	__typeof__((v)->counter) c, old;			\
 	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+	for (;;) {						\
+		if (unlikely(c == (u)))				\
+			break;					\
+		old = atomic_cmpxchg((v), c, c + (a));		\
+		if (likely(old == c))				\
+			break;					\
 		c = old;					\
+	}							\
 	c != (u);						\
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic64_add_unless(v, a, u)				\
+({								\
+	__typeof__((v)->counter) c, old;			\
+	c = atomic64_read(v);					\
+	for (;;) {						\
+		if (unlikely(c == (u)))				\
+			break;					\
+		old = atomic64_cmpxchg((v), c, c + (a));	\
+		if (likely(old == c))				\
+			break;					\
+		c = old;					\
+	}							\
+	c != (u);						\
+})
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 
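For context, here is a minimal userspace sketch of the same compare-and-swap loop the patch introduces. It is an analogue, not the kernel API: the add_unless() helper and the main() driver below are hypothetical, and the GCC/Clang __atomic builtins stand in for the kernel's atomic_cmpxchg(), which on Alpha is implemented with load-locked/store-conditional sequences.

/*
 * Userspace sketch (not kernel code): the cmpxchg retry loop from the
 * patch, expressed with the GCC/Clang __atomic builtins.  add_unless()
 * is a hypothetical helper that only mirrors the semantics documented
 * in the kerneldoc above.
 */
#include <stdio.h>

/* Add @a to *@v unless *@v == @u; returns non-zero if the add happened. */
static int add_unless(int *v, int a, int u)
{
	int c = __atomic_load_n(v, __ATOMIC_RELAXED);

	for (;;) {
		if (c == u)
			break;
		/*
		 * On failure, __atomic_compare_exchange_n writes the
		 * current value of *v back into c, so the next pass
		 * retries against fresh data, exactly like the
		 * "c = old;" step in the kernel macro.
		 */
		if (__atomic_compare_exchange_n(v, &c, c + a, 0,
						__ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			break;
	}
	return c != u;
}

int main(void)
{
	int refcount = 1;

	/*
	 * Mirrors atomic_inc_not_zero(): only take a reference while
	 * the object is still live (refcount != 0).
	 */
	if (add_unless(&refcount, 1, 0))
		printf("got a reference, refcount is now %d\n", refcount);

	refcount = 0;
	if (!add_unless(&refcount, 1, 0))
		printf("refcount already zero, no reference taken\n");

	return 0;
}

Note that the old one-line while form and the new for (;;) form are functionally equivalent; judging from the diff itself, the rewrite makes room for the likely()/unlikely() branch hints and for the __typeof__((v)->counter) declaration that lets the same loop body serve both the atomic_t and atomic64_t variants.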