Diffstat (limited to 'include/asm-parisc')
 -rw-r--r--  include/asm-parisc/atomic.h | 47
 1 file changed, 31 insertions(+), 16 deletions(-)
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index 66a0edbb51f4..e894ee35074b 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -6,6 +6,7 @@
 #define _ASM_PARISC_ATOMIC_H_
 
 #include <linux/types.h>
+#include <asm/system.h>
 
 /*
  * Atomic operations that C can't guarantee us. Useful for
@@ -174,14 +175,21 @@ static __inline__ int atomic_read(const atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)				\
-({								\
-	__typeof__((v)->counter) c, old;			\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 
 #define atomic_add(i,v)	((void)(__atomic_add_return( ((int)i),(v))))
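The new function body is the usual compare-and-swap retry loop: re-read the counter until either the forbidden value @u is observed or the cmpxchg lands. A minimal standalone sketch of the same pattern, written against C11 <stdatomic.h> rather than the kernel's atomic_cmpxchg() (the name add_unless() and the test in main() are illustrative, not part of this commit):

/* Sketch only: C11 equivalent of the cmpxchg retry loop above. */
#include <stdatomic.h>
#include <stdio.h>

static int add_unless(atomic_int *v, int a, int u)
{
	int c = atomic_load(v);

	for (;;) {
		if (c == u)	/* hit the excluded value: do not add */
			break;
		/* Try to swap c -> c + a; on failure c is refreshed with the current value. */
		if (atomic_compare_exchange_weak(v, &c, c + a))
			break;
	}
	return c != u;	/* non-zero means the add happened */
}

int main(void)
{
	atomic_int counter = 3;

	printf("%d\n", add_unless(&counter, 1, 0));	/* 1: counter is now 4 */
	atomic_store(&counter, 0);
	printf("%d\n", add_unless(&counter, 1, 0));	/* 0: counter left at 0 */
	return 0;
}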
@@ -283,14 +291,21 @@ atomic64_read(const atomic64_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic64_add_unless(v, a, u)				\
-({								\
-	__typeof__((v)->counter) c, old;			\
-	c = atomic64_read(v);					\
-	while (c != (u) && (old = atomic64_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
 
 #endif /* CONFIG_64BIT */