Diffstat (limited to 'include/asm-generic/atomic.h')
-rw-r--r--	include/asm-generic/atomic.h	39
1 file changed, 20 insertions(+), 19 deletions(-)
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index e53347fbf1da..e994197f84b7 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -43,6 +43,7 @@
  */
 #define atomic_set(v, i) (((v)->counter) = (i))
 
+#include <linux/irqflags.h>
 #include <asm/system.h>
 
 /**
@@ -57,7 +58,7 @@ static inline int atomic_add_return(int i, atomic_t *v)
 	unsigned long flags;
 	int temp;
 
-	raw_local_irq_save(flags); /* Don't trace it in a irqsoff handler */
+	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
 	temp = v->counter;
 	temp += i;
 	v->counter = temp;
@@ -78,7 +79,7 @@ static inline int atomic_sub_return(int i, atomic_t *v)
 	unsigned long flags;
 	int temp;
 
-	raw_local_irq_save(flags); /* Don't trace it in a irqsoff handler */
+	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
 	temp = v->counter;
 	temp -= i;
 	v->counter = temp;
@@ -119,14 +120,23 @@ static inline void atomic_dec(atomic_t *v)
 #define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
 #define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
 
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
+#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
+
+#define cmpxchg_local(ptr, o, n)					       \
+	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+			(unsigned long)(n), sizeof(*(ptr))))
+
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
+		c = old;
+	return c != u;
+}
 
 #define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 
@@ -140,15 +150,6 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 	raw_local_irq_restore(flags);
 }
 
-#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
-#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
-
-#define cmpxchg_local(ptr, o, n)					       \
-	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
-			(unsigned long)(n), sizeof(*(ptr))))
-
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
-
 /* Assume that atomic operations are already serializing */
 #define smp_mb__before_atomic_dec()	barrier()
 #define smp_mb__after_atomic_dec()	barrier()
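
Note: the third hunk replaces the old atomic_add_unless() statement-expression macro with a static inline function (arguments are now type-checked and evaluated once) and moves the atomic_xchg()/atomic_cmpxchg()/cmpxchg_local() definitions above it, so that atomic_cmpxchg() is already defined where the inline body expands it. The compare-and-swap retry loop itself is unchanged. As a rough userspace sketch of that loop (hypothetical names my_atomic_t/my_add_unless; C11 atomic_compare_exchange_strong() standing in for the kernel's atomic_cmpxchg(); not kernel code):

/*
 * Userspace sketch of the retry loop that the new static inline
 * atomic_add_unless() implements.  Hypothetical names (my_atomic_t,
 * my_add_unless); C11 atomic_compare_exchange_strong() stands in for
 * the kernel's atomic_cmpxchg().
 */
#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } my_atomic_t;

static int my_add_unless(my_atomic_t *v, int a, int u)
{
	int c = atomic_load(&v->counter);

	/* Retry until either the add succeeds or the value equals u. */
	while (c != u && !atomic_compare_exchange_strong(&v->counter, &c, c + a))
		;	/* on failure, c is reloaded with the current value */
	return c != u;	/* non-zero if the add was performed */
}

int main(void)
{
	my_atomic_t refs = { .counter = 1 };

	/* Mirrors atomic_inc_not_zero(): only increment while non-zero. */
	if (my_add_unless(&refs, 1, 0))
		printf("counter is now %d\n", atomic_load(&refs.counter));
	return 0;
}

atomic_inc_not_zero(v), i.e. atomic_add_unless(v, 1, 0), is the usual pattern for taking a reference only while the count is still non-zero, which is what main() above mirrors.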