Diffstat (limited to 'include/asm-x86_64/atomic.h')
-rw-r--r--  include/asm-x86_64/atomic.h | 36
1 file changed, 32 insertions, 4 deletions
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 706ca4b60000..80e4fdbe2204 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -375,8 +375,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 	long __i = i;
 	__asm__ __volatile__(
 		LOCK_PREFIX "xaddq %0, %1;"
-		:"=r"(i)
-		:"m"(v->counter), "0"(i));
+		:"+r" (i), "+m" (v->counter)
+		: : "memory");
 	return i + __i;
 }
 
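This first hunk fixes the asm constraints on atomic64_add_return(): the old version declared i as a plain output ("=r") and v->counter as a plain input ("m"), so the compiler was never told that xaddq writes the counter and could legally cache it across the call. The new constraints mark both operands read-write ("+r", "+m") and add a "memory" clobber as a compiler barrier. A minimal standalone sketch of the fixed function, assuming LOCK_PREFIX expands to the "lock" prefix as it does on SMP builds:

	typedef struct { volatile long counter; } atomic64_t;	/* as in this header */

	#define LOCK_PREFIX "lock ; "				/* SMP assumption */

	static __inline__ long atomic64_add_return(long i, atomic64_t *v)
	{
		long __i = i;
		__asm__ __volatile__(
			LOCK_PREFIX "xaddq %0, %1;"
			: "+r" (i), "+m" (v->counter)	/* both read and written */
			: : "memory");			/* barrier: counter changed */
		return i + __i;		/* xaddq left the old value in i */
	}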
@@ -388,7 +388,10 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 #define atomic64_inc_return(v) (atomic64_add_return(1,v))
 #define atomic64_dec_return(v) (atomic64_sub_return(1,v))
 
-#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
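This hunk adds 64-bit counterparts of the exchange macros and drops the (int) cast from atomic_cmpxchg(), letting the result keep the counter's natural type. atomic64_cmpxchg() stores new only if the counter still holds old and in either case returns what the counter contained before the attempt; atomic64_xchg() swaps unconditionally. A hypothetical usage sketch (claim_slot() is illustrative, not part of the header):

	/* Claim a 64-bit slot exactly once: succeeds only for the first
	 * caller that finds the slot still zero.  Hypothetical example. */
	static __inline__ int claim_slot(atomic64_t *slot, long id)
	{
		/* cmpxchg returns the previous value; 0 means id went in */
		return atomic64_cmpxchg(slot, 0, id) == 0;
	}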
@@ -402,7 +405,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
  */
 #define atomic_add_unless(v, a, u) \
 ({ \
-	int c, old; \
+	__typeof__((v)->counter) c, old; \
 	c = atomic_read(v); \
 	for (;;) { \
 		if (unlikely(c == (u))) \
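This hunk replaces the hard-coded int temporaries in atomic_add_unless() with __typeof__((v)->counter), so c and old always match the declared type of the counter field. That makes the loop body width-agnostic, which is what lets the identical code be reused verbatim for the atomic64_ variant added below. A small compile-checkable demonstration, assuming the usual counter layouts (the typedefs here are assumptions, not quoted from the header):

	#include <stdio.h>

	typedef struct { volatile int counter; } atomic_t;	/* assumed layout */
	typedef struct { volatile long counter; } atomic64_t;

	int main(void)
	{
		atomic_t a;
		atomic64_t b;
		/* the same __typeof__ spelling adapts to either counter width */
		printf("%zu %zu\n", sizeof(__typeof__(a.counter)),
				    sizeof(__typeof__(b.counter)));	/* 4 8 on x86_64 */
		return 0;
	}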
@@ -416,6 +419,31 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic64_add_unless(v, a, u) \
+({ \
+	__typeof__((v)->counter) c, old; \
+	c = atomic64_read(v); \
+	for (;;) { \
+		if (unlikely(c == (u))) \
+			break; \
+		old = atomic64_cmpxchg((v), c, c + (a)); \
+		if (likely(old == c)) \
+			break; \
+		c = old; \
+	} \
+	c != (u); \
+})
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
 __asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
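The new atomic64_add_unless() retries atomic64_cmpxchg() until it either observes the forbidden value @u or installs c + (a) over an unchanged c, and its boolean result feeds atomic64_inc_not_zero(). A hypothetical caller sketch showing the usual pattern the latter serves, taking a reference only while an object is still live (struct obj and obj_tryget() are illustrative and assume this header is included):

	struct obj {
		atomic64_t refcnt;	/* drops to 0 once the object is dying */
	};

	static __inline__ int obj_tryget(struct obj *o)
	{
		/* non-zero: reference taken; 0: refcount already hit zero */
		return atomic64_inc_not_zero(&o->refcnt);
	}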