author		H. Peter Anvin <hpa@linux.intel.com>	2010-07-28 18:18:35 -0400
committer	H. Peter Anvin <hpa@linux.intel.com>	2010-07-28 18:24:09 -0400
commit		4532b305e8f0c238dd73048068ff8a6dd1380291 (patch)
tree		e4f9e4d78385dc45ab2456edf02ab515442f1cc1 /arch/x86/include/asm/cmpxchg_32.h
parent		69309a05907546fb686b251d4ab041c26afe1e1d (diff)
x86, asm: Clean up and simplify <asm/cmpxchg.h>
Remove the __xg() hack to create a memory barrier near xchg and cmpxchg; it has been there since 1.3.11 but should not be necessary with "asm volatile" and a "memory" clobber, neither of which were there in the original implementation. However, we *should* make this a volatile reference.

Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
LKML-Reference: <AANLkTikAmaDPji-TVDarmG1yD=fwbffcsmEU=YEuP+8r@mail.gmail.com>
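For context, a minimal user-space sketch of the idiom the patch moves to (illustrative only, not part of the commit; names are hypothetical):

/*
 * Before: the pointer was laundered through a dummy struct,
 *	struct __xchg_dummy { unsigned long a[100]; };
 *	#define __xg(x) ((struct __xchg_dummy *)(x))
 * so that "+m" (*__xg(ptr)) made gcc treat the asm as touching a
 * large region.  With "asm volatile" and a "memory" clobber that is
 * redundant; a correctly-sized volatile pointer is enough:
 */
#include <stdint.h>

static inline uint32_t xchg32(volatile uint32_t *ptr, uint32_t val)
{
	volatile uint32_t *__ptr = ptr;	/* volatile reference, as in the patch */

	asm volatile("xchgl %0,%1"	/* xchg implies lock on x86 */
		     : "=r" (val), "+m" (*__ptr)
		     : "0" (val)
		     : "memory");	/* full compiler barrier */
	return val;
}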
Diffstat (limited to 'arch/x86/include/asm/cmpxchg_32.h')
-rw-r--r--	arch/x86/include/asm/cmpxchg_32.h	75
1 file changed, 42 insertions(+), 33 deletions(-)
diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 20955ea7bc12..f5bd1fd388ff 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -11,38 +11,42 @@
 extern void __xchg_wrong_size(void);
 
 /*
- * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
- * Note 2: xchg has side effect, so that attribute volatile is necessary,
- * but generally the primitive is invalid, *ptr is output argument. --ANK
+ * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
+ * Since this is generally used to protect other memory information, we
+ * use "asm volatile" and "memory" clobbers to prevent gcc from moving
+ * information around.
  */
-
-struct __xchg_dummy {
-	unsigned long a[100];
-};
-#define __xg(x) ((struct __xchg_dummy *)(x))
-
 #define __xchg(x, ptr, size)					\
 ({								\
 	__typeof(*(ptr)) __x = (x);				\
 	switch (size) {						\
 	case 1:							\
-		asm volatile("xchgb %b0,%1"			\
-			     : "=q" (__x), "+m" (*__xg(ptr))	\
+	{							\
+		volatile u8 *__ptr = (volatile u8 *)(ptr);	\
+		asm volatile("xchgb %0,%1"			\
+			     : "=q" (__x), "+m" (*__ptr)	\
 			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
+	}							\
 	case 2:							\
-		asm volatile("xchgw %w0,%1"			\
-			     : "=r" (__x), "+m" (*__xg(ptr))	\
+	{							\
+		volatile u16 *__ptr = (volatile u16 *)(ptr);	\
+		asm volatile("xchgw %0,%1"			\
+			     : "=r" (__x), "+m" (*__ptr)	\
 			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
+	}							\
 	case 4:							\
+	{							\
+		volatile u32 *__ptr = (volatile u32 *)(ptr);	\
 		asm volatile("xchgl %0,%1"			\
-			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "=r" (__x), "+m" (*__ptr)	\
 			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
+	}							\
 	default:						\
 		__xchg_wrong_size(); 				\
 	}							\
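The size switch above is driven entirely by a compile-time constant; the header wires up the public macro along these lines (a sketch, not part of this hunk). sizeof(*(ptr)) lets gcc fold the switch down to the single matching asm, and the reference to the undefined __xchg_wrong_size() survives only for unsupported sizes, turning a bad operand size into a link-time error:

/* Dispatch idiom: size is constant, so the switch folds away. */
#define xchg(ptr, v)	__xchg((v), (ptr), sizeof(*(ptr)))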
@@ -94,23 +98,32 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __new = (new);			\
 	switch (size) {						\
 	case 1:							\
-		asm volatile(lock "cmpxchgb %b2,%1"		\
-			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+	{							\
+		volatile u8 *__ptr = (volatile u8 *)(ptr);	\
+		asm volatile(lock "cmpxchgb %2,%1"		\
+			     : "=a" (__ret), "+m" (*__ptr)	\
 			     : "q" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
+	}							\
 	case 2:							\
-		asm volatile(lock "cmpxchgw %w2,%1"		\
-			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+	{							\
+		volatile u16 *__ptr = (volatile u16 *)(ptr);	\
+		asm volatile(lock "cmpxchgw %2,%1"		\
+			     : "=a" (__ret), "+m" (*__ptr)	\
 			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
+	}							\
 	case 4:							\
+	{							\
+		volatile u32 *__ptr = (volatile u32 *)(ptr);	\
 		asm volatile(lock "cmpxchgl %2,%1"		\
-			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "=a" (__ret), "+m" (*__ptr)	\
 			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
+	}							\
 	default:						\
 		__cmpxchg_wrong_size();				\
 	}							\
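A hedged user-space analogue of the 32-bit case after the patch, plus the classic retry loop the primitive enables (function names here are illustrative, not from the kernel):

#include <stdint.h>

static inline uint32_t cmpxchg32(volatile uint32_t *ptr,
				 uint32_t old, uint32_t new)
{
	uint32_t ret;

	/* Typed volatile pointer replaces *__xg(ptr); the "memory"
	 * clobber keeps gcc from reordering accesses across the CAS. */
	asm volatile("lock; cmpxchgl %2,%1"
		     : "=a" (ret), "+m" (*ptr)
		     : "r" (new), "0" (old)
		     : "memory");
	return ret;	/* value found at *ptr; store happened iff ret == old */
}

/* Typical caller: lock-free increment built on the CAS primitive. */
static inline uint32_t atomic_inc_return32(volatile uint32_t *ctr)
{
	uint32_t old;

	do {
		old = *ctr;
	} while (cmpxchg32(ctr, old, old + 1) != old);
	return old + 1;
}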
@@ -148,31 +161,27 @@ extern void __cmpxchg_wrong_size(void);
 						 (unsigned long long)(n)))
 #endif
 
-static inline unsigned long long __cmpxchg64(volatile void *ptr,
-					     unsigned long long old,
-					     unsigned long long new)
+static inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
 {
-	unsigned long long prev;
+	u64 prev;
 	asm volatile(LOCK_PREFIX "cmpxchg8b %1"
 		     : "=A" (prev),
-		       "+m" (*__xg(ptr))
-		     : "b" ((unsigned long)new),
-		       "c" ((unsigned long)(new >> 32)),
+		       "+m" (*ptr)
+		     : "b" ((u32)new),
+		       "c" ((u32)(new >> 32)),
 		       "0" (old)
 		     : "memory");
 	return prev;
 }
 
-static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
-						   unsigned long long old,
-						   unsigned long long new)
+static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
 {
-	unsigned long long prev;
+	u64 prev;
 	asm volatile("cmpxchg8b %1"
 		     : "=A" (prev),
-		       "+m" (*__xg(ptr))
-		     : "b" ((unsigned long)new),
-		       "c" ((unsigned long)(new >> 32)),
+		       "+m" (*ptr)
+		     : "b" ((u32)new),
+		       "c" ((u32)(new >> 32)),
 		       "0" (old)
 		     : "memory");
 	return prev;
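The "=A" / "b" / "c" constraints are the non-obvious part of the 8-byte variant: on 32-bit x86, cmpxchg8b compares edx:eax against the 64-bit memory operand and, on a match, stores ecx:ebx into it; otherwise it loads the memory value into edx:eax. A standalone sketch (32-bit x86 only; illustrative name, not the kernel's):

#include <stdint.h>

static inline uint64_t cmpxchg64_sketch(volatile uint64_t *ptr,
					uint64_t old, uint64_t new)
{
	uint64_t prev;

	asm volatile("lock; cmpxchg8b %1"
		     : "=A" (prev),			/* edx:eax = value read */
		       "+m" (*ptr)
		     : "b" ((uint32_t)new),		/* ebx = low half of new */
		       "c" ((uint32_t)(new >> 32)),	/* ecx = high half of new */
		       "0" (old)			/* edx:eax = comparand in */
		     : "memory");
	return prev;
}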