Diffstat (limited to 'include/asm-x86_64/atomic.h')
-rw-r--r--	include/asm-x86_64/atomic.h	65
1 file changed, 48 insertions(+), 17 deletions(-)
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 706ca4b60000..f2e64634fa48 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -2,6 +2,7 @@
 #define __ARCH_X86_64_ATOMIC__
 
 #include <asm/alternative.h>
+#include <asm/cmpxchg.h>
 
 /* atomic_t should be 32 bit signed type */
 
@@ -375,8 +376,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 	long __i = i;
 	__asm__ __volatile__(
 		LOCK_PREFIX "xaddq %0, %1;"
-		:"=r"(i)
-		:"m"(v->counter), "0"(i));
+		:"+r" (i), "+m" (v->counter)
+		: : "memory");
 	return i + __i;
 }
 
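The constraint change is the substance of this hunk: xaddq both reads and writes its register operand (it comes back holding the old counter value) and writes v->counter, but the old "=r"(i) / "m"(v->counter), "0"(i) constraints never told the compiler that the asm modifies memory. The new "+r"/"+m" read-write operands plus the "memory" clobber make that explicit. A minimal user-space sketch of the same pattern (an illustration, not the kernel header itself):

/* User-space sketch of the xaddq fetch-and-add idiom the hunk above
 * switches to: "+r" marks the register as read-write (it returns the
 * old counter value) and "+m" marks the counter itself as read-write. */
#include <stdio.h>

typedef struct { volatile long counter; } atomic64_t;

static inline long atomic64_add_return_sketch(long i, atomic64_t *v)
{
	long __i = i;
	__asm__ __volatile__(
		"lock; xaddq %0, %1"	/* %0 <- old counter; counter += i */
		: "+r" (i), "+m" (v->counter)
		: : "memory");
	return i + __i;		/* old value plus the increment = new value */
}

int main(void)
{
	atomic64_t v = { .counter = 40 };
	printf("%ld\n", atomic64_add_return_sketch(2, &v));	/* prints 42 */
	return 0;
}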
@@ -388,7 +389,10 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 #define atomic64_inc_return(v) (atomic64_add_return(1,v))
 #define atomic64_dec_return(v) (atomic64_sub_return(1,v))
 
-#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
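These wrappers are why the new #include <asm/cmpxchg.h> appears in the first hunk: atomic64_cmpxchg() and atomic64_xchg() simply route to the generic cmpxchg()/xchg() on the counter field. cmpxchg() returns the value that was actually in memory, so a caller detects success by comparing that return value with the value it read beforehand. A sketch of the resulting retry idiom, using GCC's __sync_val_compare_and_swap() as a user-space stand-in for cmpxchg(); atomic64_set_max_sketch() is a made-up helper, not part of this patch:

/* Compare-and-swap retry loop: re-read and retry whenever another CPU
 * changed the counter between our read and our cmpxchg. */
typedef struct { volatile long counter; } atomic64_t;

static inline void atomic64_set_max_sketch(atomic64_t *v, long new)
{
	long old;

	do {
		old = v->counter;	/* snapshot the current value */
		if (old >= new)
			return;		/* already at least this large */
	} while (__sync_val_compare_and_swap(&v->counter, old, new) != old);
}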
@@ -400,22 +404,49 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)			\
-({							\
-	int c, old;					\
-	c = atomic_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
 __asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
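Two things stand out in the last hunk. atomic_add_unless() changes from a statement-expression macro to a real inline function, so its arguments are now type-checked and evaluated exactly once, and atomic64_add_unless()/atomic64_inc_not_zero() are added as 64-bit mirrors. The classic consumer of *_inc_not_zero() is a lookup that can race with a final reference drop; a hypothetical usage sketch (struct obj and obj_get() are invented names, not part of this patch):

/* Take a reference only if the object is still live: if the refcount
 * has already reached zero, the object is being freed and must not be
 * resurrected, so the lookup reports failure instead. */
struct obj {
	atomic64_t refcount;
	/* ... */
};

static struct obj *obj_get(struct obj *o)
{
	if (!atomic64_inc_not_zero(&o->refcount))
		return NULL;	/* lost the race with the last put */
	return o;
}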