author		H. Peter Anvin <hpa@zytor.com>	2010-07-27 20:01:49 -0400
committer	H. Peter Anvin <hpa@zytor.com>	2010-07-27 20:14:02 -0400
commit		113fc5a6e8c2288619ff7e8187a6f556b7e0d372 (patch)
tree		84393467b2174c05e7240db72ffa55ce3da67a70 /arch
parent		fc0f5ac8fe693d1b05f5a928cc48135d1c8b7f2e (diff)
x86: Add memory modify constraints to xchg() and cmpxchg()
xchg() and cmpxchg() modify their memory operands, not merely read
them.  For some versions of gcc the "memory" clobber has apparently
dealt with the situation, but not for all.

Originally-by: Linus Torvalds <torvalds@linux-foundation.org>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Cc: Glauber Costa <glommer@redhat.com>
Cc: Avi Kivity <avi@redhat.com>
Cc: Peter Palfrader <peter@palfrader.org>
Cc: Greg KH <gregkh@suse.de>
Cc: Alan Cox <alan@lxorguk.ukuu.org.uk>
Cc: Zachary Amsden <zamsden@redhat.com>
Cc: Marcelo Tosatti <mtosatti@redhat.com>
Cc: <stable@kernel.org>
LKML-Reference: <4C4F7277.8050306@zytor.com>
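The pattern being fixed: the old constraints listed the memory location
only as an input ("m") and relied on the "memory" clobber, so some gcc
versions could assume the location itself was never written. Marking it
"+m" (a read-write output operand) makes the write explicit. A minimal
user-space sketch of the corrected pattern follows; my_xchg() is a
hypothetical int-only helper for illustration, not the kernel macro,
which dispatches on operand size:

#include <stdio.h>

/* Hypothetical user-space analogue of the patched xchg() pattern. */
static inline int my_xchg(int *ptr, int val)
{
	/*
	 * "+m" tells gcc that *ptr is both read and written by the
	 * instruction; with the old input-only "m" constraint, the
	 * compiler could treat the location as unmodified.
	 */
	asm volatile("xchgl %0,%1"
		     : "=r" (val), "+m" (*ptr)
		     : "0" (val)
		     : "memory");
	return val;	/* previous contents of *ptr */
}

int main(void)
{
	int x = 1;
	int old = my_xchg(&x, 2);
	printf("old=%d x=%d\n", old, x);	/* prints: old=1 x=2 */
	return 0;
}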
Diffstat (limited to 'arch')
-rw-r--r--	arch/x86/include/asm/cmpxchg_32.h	68
-rw-r--r--	arch/x86/include/asm/cmpxchg_64.h	40
2 files changed, 54 insertions(+), 54 deletions(-)
diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 8859e12dd3cf..c1cf59d72f09 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -27,20 +27,20 @@ struct __xchg_dummy {
 	switch (size) {						\
 	case 1:							\
 		asm volatile("xchgb %b0,%1"			\
-			     : "=q" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=q" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 2:							\
 		asm volatile("xchgw %w0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 4:							\
 		asm volatile("xchgl %0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -70,14 +70,14 @@ static inline void __set_64bit(unsigned long long *ptr,
 			    unsigned int low, unsigned int high)
 {
 	asm volatile("\n1:\t"
-		     "movl (%0), %%eax\n\t"
-		     "movl 4(%0), %%edx\n\t"
-		     LOCK_PREFIX "cmpxchg8b (%0)\n\t"
+		     "movl (%1), %%eax\n\t"
+		     "movl 4(%1), %%edx\n\t"
+		     LOCK_PREFIX "cmpxchg8b (%1)\n\t"
 		     "jnz 1b"
-		     : /* no outputs */
-		     : "D"(ptr),
-		     "b"(low),
-		     "c"(high)
+		     : "=m" (*ptr)
+		     : "D" (ptr),
+		       "b" (low),
+		       "c" (high)
 		     : "ax", "dx", "memory");
 }
 
@@ -121,21 +121,21 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __new = (new);			\
 	switch (size) {						\
 	case 1:							\
-		asm volatile(lock "cmpxchgb %b1,%2"		\
-			     : "=a"(__ret)			\
-			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgb %b2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "q" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 2:							\
-		asm volatile(lock "cmpxchgw %w1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgw %w2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 4:							\
-		asm volatile(lock "cmpxchgl %1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgl %2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -180,12 +180,12 @@ static inline unsigned long long __cmpxchg64(volatile void *ptr,
 					     unsigned long long new)
 {
 	unsigned long long prev;
-	asm volatile(LOCK_PREFIX "cmpxchg8b %3"
-		     : "=A"(prev)
-		     : "b"((unsigned long)new),
-		       "c"((unsigned long)(new >> 32)),
-		       "m"(*__xg(ptr)),
-		       "0"(old)
+	asm volatile(LOCK_PREFIX "cmpxchg8b %1"
+		     : "=A" (prev),
+		       "+m" (*__xg(ptr))
+		     : "b" ((unsigned long)new),
+		       "c" ((unsigned long)(new >> 32)),
+		       "0" (old)
 		     : "memory");
 	return prev;
 }
@@ -195,12 +195,12 @@ static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
 						   unsigned long long new)
 {
 	unsigned long long prev;
-	asm volatile("cmpxchg8b %3"
-		     : "=A"(prev)
-		     : "b"((unsigned long)new),
-		       "c"((unsigned long)(new >> 32)),
-		       "m"(*__xg(ptr)),
-		       "0"(old)
+	asm volatile("cmpxchg8b %1"
+		     : "=A" (prev),
+		       "+m" (*__xg(ptr))
+		     : "b" ((unsigned long)new),
+		       "c" ((unsigned long)(new >> 32)),
+		       "0" (old)
 		     : "memory");
 	return prev;
 }
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index 485ae415faec..b92f147339f3 100644
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -26,26 +26,26 @@ extern void __cmpxchg_wrong_size(void);
 	switch (size) {						\
 	case 1:							\
 		asm volatile("xchgb %b0,%1"			\
-			     : "=q" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=q" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 2:							\
 		asm volatile("xchgw %w0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 4:							\
 		asm volatile("xchgl %k0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 8:							\
 		asm volatile("xchgq %0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -71,27 +71,27 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __new = (new);			\
 	switch (size) {						\
 	case 1:							\
-		asm volatile(lock "cmpxchgb %b1,%2"		\
-			     : "=a"(__ret)			\
-			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgb %b2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "q" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 2:							\
-		asm volatile(lock "cmpxchgw %w1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgw %w2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 4:							\
-		asm volatile(lock "cmpxchgl %k1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgl %k2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 8:							\
-		asm volatile(lock "cmpxchgq %1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgq %2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	default:						\
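
A note on the operand renumbering visible in the hunks above: once
"+m" (*__xg(ptr)) joins the output list as operand 1, the memory
operand's index changes, so the instruction template swaps from e.g.
"cmpxchgl %1,%2" to "cmpxchgl %2,%1"; the emitted instruction is
unchanged. A minimal user-space sketch of the patched cmpxchg pattern,
with a hypothetical int-only my_cmpxchg() and the lock prefix
hard-coded rather than selected via LOCK_PREFIX:

#include <stdio.h>

/* Hypothetical single-size analogue of the patched __cmpxchg macro. */
static inline int my_cmpxchg(int *ptr, int old, int new)
{
	int ret;
	/* Operand 1 is the read-write memory location, operand 2 the
	 * new value; "0" (old) places the expected value in eax. */
	asm volatile("lock; cmpxchgl %2,%1"
		     : "=a" (ret), "+m" (*ptr)
		     : "r" (new), "0" (old)
		     : "memory");
	return ret;	/* previous value; equals old on success */
}

int main(void)
{
	int x = 5;
	int prev = my_cmpxchg(&x, 5, 9);
	printf("prev=%d x=%d\n", prev, x);	/* prints: prev=5 x=9 */
	return 0;
}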