aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMathieu Lacage <mathieu.lacage@sophia.inria.fr>2010-06-27 06:26:06 -0400
committerArnd Bergmann <arnd@arndb.de>2010-10-09 15:36:35 -0400
commit8b9d40691e8f5e7e0c8fb839c2bad29c5e0888ce (patch)
treea4e4cea8af80fa0aa90cb98a9516d3118cc1b51c
parent6b0cd00bc396daf5c2dcf17a8d82055335341f46 (diff)
asm-generic: make atomic_add_unless a function
atomic_add_unless is a macro, so bad things happen if the caller defines a local variable named c, just like the local variable c defined by the macro. Thus, convert atomic_add_unless to a function. (bug triggered by net/ipv4/netfilter/ipt_CLUSTERIP.c: clusterip_config_find_get calls atomic_inc_not_zero) Signed-off-by: Mathieu Lacage <mathieu.lacage@inria.fr> Signed-off-by: Arnd Bergmann <arnd@arndb.de>
-rw-r--r--include/asm-generic/atomic.h34
1 files changed, 17 insertions, 17 deletions
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index e53347fbf1da..a6cc019a41e0 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -119,14 +119,23 @@ static inline void atomic_dec(atomic_t *v)
119#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0) 119#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
120#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0) 120#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
121 121
122#define atomic_add_unless(v, a, u) \ 122#define atomic_xchg(ptr, v) (xchg(&(ptr)->counter, (v)))
123({ \ 123#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
124 int c, old; \ 124
125 c = atomic_read(v); \ 125#define cmpxchg_local(ptr, o, n) \
126 while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \ 126 ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
127 c = old; \ 127 (unsigned long)(n), sizeof(*(ptr))))
128 c != (u); \ 128
129}) 129#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
130
131static inline int atomic_add_unless(atomic_t *v, int a, int u)
132{
133 int c, old;
134 c = atomic_read(v);
135 while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
136 c = old;
137 return c != u;
138}
130 139
131#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0) 140#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
132 141
@@ -140,15 +149,6 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
140 raw_local_irq_restore(flags); 149 raw_local_irq_restore(flags);
141} 150}
142 151
143#define atomic_xchg(ptr, v) (xchg(&(ptr)->counter, (v)))
144#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
145
146#define cmpxchg_local(ptr, o, n) \
147 ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
148 (unsigned long)(n), sizeof(*(ptr))))
149
150#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
151
152/* Assume that atomic operations are already serializing */ 152/* Assume that atomic operations are already serializing */
153#define smp_mb__before_atomic_dec() barrier() 153#define smp_mb__before_atomic_dec() barrier()
154#define smp_mb__after_atomic_dec() barrier() 154#define smp_mb__after_atomic_dec() barrier()