Diffstat (limited to 'include/asm-mips/atomic.h')
 include/asm-mips/atomic.h | 57 +++++++++++++++++++++++++++++++++++++++++++++++----------
 1 file changed, 47 insertions(+), 10 deletions(-)
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 1ac50b6c47ad..62daa746a9c9 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -18,6 +18,7 @@
 #include <asm/barrier.h>
 #include <asm/cpu-features.h>
 #include <asm/war.h>
+#include <asm/system.h>
 
 typedef struct { volatile int counter; } atomic_t;
 
@@ -306,8 +307,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 	return result;
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
 
 /**
  * atomic_add_unless - add unless the number is a given value
@@ -318,14 +319,20 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
@@ -681,6 +688,36 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 	return result;
 }
 
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
+
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 #define atomic64_dec_return(v) atomic64_sub_return(1,(v))
 #define atomic64_inc_return(v) atomic64_add_return(1,(v))
 
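A note on the new retry loop: converting atomic_add_unless() from a
statement-expression macro to an inline function gives proper type
checking of the arguments and lets the two exit paths of the for (;;)
loop carry likely()/unlikely() branch hints. For illustration, the same
"add unless" semantics can be modeled in ordinary userspace C11; the
sketch below is hypothetical (model_add_unless() is not part of this
patch) and substitutes C11 compare-exchange for the kernel's cmpxchg(),
which on MIPS expands to ll/sc instruction sequences:

/*
 * Userspace model of the atomic_add_unless() loop added above.
 * Illustration only: the kernel version relies on cmpxchg() and the
 * MIPS ll/sc primitives, not on <stdatomic.h>.
 */
#include <stdatomic.h>
#include <stdio.h>

static int model_add_unless(atomic_int *v, int a, int u)
{
	int c = atomic_load(v);

	for (;;) {
		if (c == u)		/* hit the excluded value: give up */
			return 0;
		/*
		 * Try to publish c + a.  On failure, c is refreshed with
		 * the current value and the loop retries from there.
		 */
		if (atomic_compare_exchange_weak(v, &c, c + a))
			return 1;
	}
}

int main(void)
{
	atomic_int refcount = 2;

	/*
	 * Equivalent of atomic_inc_not_zero(): take a reference only
	 * while the object is still live (count != 0).
	 */
	if (model_add_unless(&refcount, 1, 0))
		printf("got reference, count now %d\n", atomic_load(&refcount));

	atomic_store(&refcount, 0);
	if (!model_add_unless(&refcount, 1, 0))
		printf("object already dead, no reference taken\n");

	return 0;
}

Built with e.g. cc -std=c11, the first call succeeds and the second
refuses to resurrect a zeroed count, which is exactly the guarantee
atomic_inc_not_zero() gives its callers.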