author		Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>	2011-06-21 15:00:55 -0400
committer	H. Peter Anvin <hpa@linux.intel.com>			2011-08-29 16:42:20 -0400
commit		433b3520616be694e0aa777089346c8718c91a7b (patch)
tree		0d73d7efc65c550a02535c0f10265807402e3617
parent		e9826380d83d1bda3ee5663bf3fa4667a6fbe60a (diff)
x86: Add xadd helper macro
Add a common xadd implementation.

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>
Link: http://lkml.kernel.org/r/4E5BCC40.3030501@goop.org
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
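
The helper wraps the x86 xadd instruction, which atomically exchanges a
register with a memory operand and stores their sum back to memory, so
the caller gets the value *ptr held before the addition. A minimal
user-space sketch of those semantics (not part of the patch; xadd_demo
is an illustrative name, and this assumes a GCC-style compiler
targeting x86):

	#include <stdio.h>

	static inline int xadd_demo(int *ptr, int inc)
	{
		/* lock xaddl: the register and *ptr are exchanged and
		 * *ptr receives the sum, so "inc" comes back holding
		 * the old value of *ptr */
		asm volatile("lock; xaddl %0, %1"
			     : "+r" (inc), "+m" (*ptr)
			     : : "memory", "cc");
		return inc;
	}

	int main(void)
	{
		int counter = 40;
		int old = xadd_demo(&counter, 2);

		printf("old=%d new=%d\n", old, counter); /* old=40 new=42 */
		return 0;
	}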
-rw-r--r--	arch/x86/include/asm/cmpxchg.h	43
1 file changed, 43 insertions(+), 0 deletions(-)
diff --git a/arch/x86/include/asm/cmpxchg.h b/arch/x86/include/asm/cmpxchg.h
index efe3ec778a58..0d0d9cdd3309 100644
--- a/arch/x86/include/asm/cmpxchg.h
+++ b/arch/x86/include/asm/cmpxchg.h
@@ -6,6 +6,7 @@
 /* Non-existant functions to indicate usage errors at link time. */
 extern void __xchg_wrong_size(void);
 extern void __cmpxchg_wrong_size(void);
+extern void __xadd_wrong_size(void);
 
 /*
  * Constants for operation sizes. On 32-bit, the 64-bit size it set to
@@ -157,4 +158,46 @@ extern void __cmpxchg_wrong_size(void);
 	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
 #endif
 
+#define __xadd(ptr, inc, lock)						\
+	({								\
+		__typeof__ (*(ptr)) __ret = (inc);			\
+		switch (sizeof(*(ptr))) {				\
+		case __X86_CASE_B:					\
+			asm volatile (lock "xaddb %b0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_W:					\
+			asm volatile (lock "xaddw %w0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_L:					\
+			asm volatile (lock "xaddl %0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_Q:					\
+			asm volatile (lock "xaddq %q0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		default:						\
+			__xadd_wrong_size();				\
+		}							\
+		__ret;							\
+	})
+
+/*
+ * xadd() adds "inc" to "*ptr" and atomically returns the previous
+ * value of "*ptr".
+ *
+ * xadd() is locked when multiple CPUs are online
+ * xadd_sync() is always locked
+ * xadd_local() is never locked
+ */
+#define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
+#define xadd_sync(ptr, inc)	__xadd((ptr), (inc), "lock; ")
+#define xadd_local(ptr, inc)	__xadd((ptr), (inc), "")
+
 #endif /* ASM_X86_CMPXCHG_H */
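
On picking between the three variants: xadd() takes LOCK_PREFIX, so per
the comment above it is locked only when multiple CPUs are online;
xadd_sync() hard-codes the lock prefix; xadd_local() never locks and is
only safe for data no other CPU can touch. A hedged sketch of call
sites (the example_* helpers are illustrative, not part of this patch,
and assume the header above is in scope):

	/* add i to *v and return the new value; SMP-safe, since
	 * LOCK_PREFIX supplies the lock prefix where it is needed */
	static inline int example_add_return(int i, int *v)
	{
		return i + xadd(v, i);	/* xadd() returns the old *v */
	}

	/* same, but unlocked: only for CPU-local data, such as
	 * per-cpu counters that no other CPU ever writes */
	static inline int example_local_add_return(int i, int *v)
	{
		return i + xadd_local(v, i);
	}

Later x86 patches built primitives such as atomic_add_return() on this
pattern, but those call sites are not part of this commit.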