Diffstat (limited to 'include/asm-mips'):
 include/asm-mips/atomic.h | 46 ++++++++++++++++++++++++++++++----------------
 1 file changed, 30 insertions(+), 16 deletions(-)
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 6423ffa195a4..62daa746a9c9 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -18,6 +18,7 @@
 #include <asm/barrier.h>
 #include <asm/cpu-features.h>
 #include <asm/war.h>
+#include <asm/system.h>
 
 typedef struct { volatile int counter; } atomic_t;
 
@@ -318,14 +319,20 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)				\
-({								\
-	__typeof__((v)->counter) c, old;			\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
@@ -694,14 +701,21 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic64_add_unless(v, a, u)				\
-({								\
-	__typeof__((v)->counter) c, old;			\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic64_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
 #define atomic64_dec_return(v) atomic64_sub_return(1,(v))
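
The retry loop this patch introduces is the standard compare-and-swap pattern: reread the value on each failed exchange, bail out early if the forbidden value is seen. Below is a minimal stand-alone sketch of the same logic for userspace, using C11 atomics; atomic_compare_exchange_weak() stands in for the kernel's atomic_cmpxchg(), and the add_unless() name and refcount example are hypothetical illustrations, not part of the patch.

/*
 * Stand-alone sketch (not kernel code) of the cmpxchg loop above.
 * add_unless() adds "a" to *v unless *v == u, and returns non-zero
 * if the add happened -- the same contract as atomic_add_unless().
 */
#include <stdatomic.h>
#include <stdio.h>

static int add_unless(atomic_int *v, int a, int u)
{
	int c = atomic_load(v);

	for (;;) {
		if (c == u)		/* hit the forbidden value */
			break;
		/* try to swap c -> c + a; on failure c is reloaded */
		if (atomic_compare_exchange_weak(v, &c, c + a))
			break;
	}
	return c != u;
}

int main(void)
{
	atomic_int refcount = 1;

	/* mirrors atomic_inc_not_zero(): fails once refcount hits 0 */
	printf("%d\n", add_unless(&refcount, 1, 0));	/* 1: took a ref */
	atomic_store(&refcount, 0);
	printf("%d\n", add_unless(&refcount, 1, 0));	/* 0: object dead */
	return 0;
}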