Diffstat (limited to 'include/asm-x86_64/atomic.h')
-rw-r--r--	include/asm-x86_64/atomic.h	59
1 file changed, 31 insertions(+), 28 deletions(-)
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 80e4fdbe2204..19e0c607b568 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -2,6 +2,7 @@
 #define __ARCH_X86_64_ATOMIC__
 
 #include <asm/alternative.h>
+#include <asm/system.h>
 
 /* atomic_t should be 32 bit signed type */
 
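The new #include <asm/system.h> is presumably needed because atomic_add_unless() is no longer a macro: atomic_cmpxchg() on x86-64 resolves to the cmpxchg() machinery declared in <asm/system.h>, and a real inline function body must see that declaration at its point of definition, not merely at its points of use. A minimal illustration of the difference (helper() and both add_one_* names are hypothetical, not kernel API):

/* A macro body is only parsed where it is expanded, so this is fine
 * even though helper() has not been declared yet: */
#define add_one_macro(x)	helper(x)

/* An inline function body is parsed at its definition, so helper()
 * must already be visible, just as cmpxchg() must be visible to the
 * new atomic_add_unless(): */
static int helper(int x) { return x + 1; }

static inline int add_one_func(int x)
{
	return helper(x);
}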
@@ -403,20 +404,21 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)			\
-({							\
-	__typeof__((v)->counter) c, old;		\
-	c = atomic_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 /**
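The loop itself is the standard compare-and-swap retry pattern: read the counter, bail out if it already equals @u, otherwise try to install c + a; if another CPU changed the counter in the meantime, atomic_cmpxchg() returns the value it found instead, and the loop retries from that value. A runnable userspace sketch of the same pattern, using the GCC/Clang __atomic builtins in place of the kernel's atomic_cmpxchg() (my_atomic_add_unless and main are illustrative names, not kernel API):

#include <stdio.h>

static int my_atomic_add_unless(int *v, int a, int u)
{
	int c = __atomic_load_n(v, __ATOMIC_SEQ_CST);

	for (;;) {
		if (c == u)
			break;
		/* On failure this updates c with the value actually found
		 * in *v, which mirrors the kernel's "c = old" step. */
		if (__atomic_compare_exchange_n(v, &c, c + a, 0,
						__ATOMIC_SEQ_CST,
						__ATOMIC_SEQ_CST))
			break;
	}
	return c != u;
}

int main(void)
{
	int counter = 3;

	printf("%d\n", my_atomic_add_unless(&counter, 1, 0)); /* 1; counter -> 4 */
	counter = 0;
	printf("%d\n", my_atomic_add_unless(&counter, 1, 0)); /* 0; counter stays 0 */
	return 0;
}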
@@ -428,20 +430,21 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic64_add_unless(v, a, u)			\
-({							\
-	__typeof__((v)->counter) c, old;		\
-	c = atomic64_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic64_cmpxchg((v), c, c + (a));\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
 /* These are x86-specific, used by some header files */
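A common consumer of atomic_inc_not_zero() (and its 64-bit twin) is lookup code that can race with the final reference drop: the reference is taken only if the count has not already reached zero. A sketch of that idiom, with struct obj and obj_tryget() as hypothetical names, not anything defined by this header:

struct obj {
	atomic_t refcount;	/* reaches 0 when the object is being freed */
};

/* Returns non-zero if a reference was taken, 0 if the object is
 * already past its final put and must not be touched. */
static inline int obj_tryget(struct obj *o)
{
	return atomic_inc_not_zero(&o->refcount);
}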