author    Joe Perches <joe@perches.com>            2008-03-23 04:01:51 -0400
committer Ingo Molnar <mingo@elte.hu>              2008-04-17 11:41:22 -0400
commit    8121019cad7bfe61f8f626a85427aca66dfe0f1e (patch)
tree      05975450b2fd32538a808d8f87faaebd102223c8 /include/asm-x86/cmpxchg_32.h
parent    3d3c6e10036dcbbe9fe7d69911f5638faecfbaeb (diff)

include/asm-x86/cmpxchg_32.h: checkpatch cleanups - formatting only

Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/cmpxchg_32.h')
-rw-r--r--  include/asm-x86/cmpxchg_32.h  253
1 file changed, 132 insertions(+), 121 deletions(-)
diff --git a/include/asm-x86/cmpxchg_32.h b/include/asm-x86/cmpxchg_32.h
index 959fad00dff5..bf5a69d1329e 100644
--- a/include/asm-x86/cmpxchg_32.h
+++ b/include/asm-x86/cmpxchg_32.h
@@ -8,9 +8,12 @@
  * you need to test for the feature in boot_cpu_data.
  */
 
-#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
+#define xchg(ptr, v)							\
+	((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), sizeof(*(ptr))))
 
-struct __xchg_dummy { unsigned long a[100]; };
+struct __xchg_dummy {
+	unsigned long a[100];
+};
 #define __xg(x) ((struct __xchg_dummy *)(x))
 
 /*
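The xchg() macro touched in the hunk above atomically swaps a new value into *ptr and returns the previous contents; on x86 the xchg instruction is implicitly locked, so no LOCK prefix is needed. A minimal usage sketch, with illustrative names that are not part of this header:

	/* Illustrative only: publish a new flag word and observe the
	 * value it replaced, in one atomic operation. */
	static unsigned long shared_flags;

	static unsigned long publish_flags(unsigned long new_flags)
	{
		return xchg(&shared_flags, new_flags);
	}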
@@ -27,72 +30,74 @@ struct __xchg_dummy { unsigned long a[100]; };
  * of the instruction set reference 24319102.pdf. We need
  * the reader side to see the coherent 64bit value.
  */
-static inline void __set_64bit (unsigned long long * ptr,
-		unsigned int low, unsigned int high)
+static inline void __set_64bit(unsigned long long *ptr,
+			       unsigned int low, unsigned int high)
 {
-	__asm__ __volatile__ (
-		"\n1:\t"
-		"movl (%0), %%eax\n\t"
-		"movl 4(%0), %%edx\n\t"
-		LOCK_PREFIX "cmpxchg8b (%0)\n\t"
-		"jnz 1b"
-		: /* no outputs */
-		: "D"(ptr),
-		  "b"(low),
-		  "c"(high)
-		: "ax","dx","memory");
+	asm volatile("\n1:\t"
+		     "movl (%0), %%eax\n\t"
+		     "movl 4(%0), %%edx\n\t"
+		     LOCK_PREFIX "cmpxchg8b (%0)\n\t"
+		     "jnz 1b"
+		     : /* no outputs */
+		     : "D"(ptr),
+		       "b"(low),
+		       "c"(high)
+		     : "ax", "dx", "memory");
 }
 
-static inline void __set_64bit_constant (unsigned long long *ptr,
-					 unsigned long long value)
+static inline void __set_64bit_constant(unsigned long long *ptr,
+					unsigned long long value)
 {
-	__set_64bit(ptr,(unsigned int)(value), (unsigned int)((value)>>32ULL));
+	__set_64bit(ptr, (unsigned int)value, (unsigned int)(value >> 32));
 }
-#define ll_low(x)	*(((unsigned int*)&(x))+0)
-#define ll_high(x)	*(((unsigned int*)&(x))+1)
 
-static inline void __set_64bit_var (unsigned long long *ptr,
-				    unsigned long long value)
+#define ll_low(x)	*(((unsigned int *)&(x)) + 0)
+#define ll_high(x)	*(((unsigned int *)&(x)) + 1)
+
+static inline void __set_64bit_var(unsigned long long *ptr,
+				   unsigned long long value)
 {
-	__set_64bit(ptr,ll_low(value), ll_high(value));
+	__set_64bit(ptr, ll_low(value), ll_high(value));
 }
 
-#define set_64bit(ptr,value) \
-(__builtin_constant_p(value) ? \
- __set_64bit_constant(ptr, value) : \
- __set_64bit_var(ptr, value) )
+#define set_64bit(ptr, value)				\
+	(__builtin_constant_p((value))			\
+	 ? __set_64bit_constant((ptr), (value))		\
+	 : __set_64bit_var((ptr), (value)))
 
-#define _set_64bit(ptr,value) \
-(__builtin_constant_p(value) ? \
- __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : \
- __set_64bit(ptr, ll_low(value), ll_high(value)) )
+#define _set_64bit(ptr, value)						\
+	(__builtin_constant_p(value)					\
+	 ? __set_64bit(ptr, (unsigned int)(value),			\
+		       (unsigned int)((value) >> 32))			\
+	 : __set_64bit(ptr, ll_low((value)), ll_high((value))))
 
 /*
  * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
  * Note 2: xchg has side effect, so that attribute volatile is necessary,
  *	  but generally the primitive is invalid, *ptr is output argument. --ANK
  */
-static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
+static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
+				   int size)
 {
 	switch (size) {
 	case 1:
-		__asm__ __volatile__("xchgb %b0,%1"
-			:"=q" (x)
-			:"m" (*__xg(ptr)), "0" (x)
-			:"memory");
+		asm volatile("xchgb %b0,%1"
+			     : "=q" (x)
+			     : "m" (*__xg(ptr)), "0" (x)
+			     : "memory");
 		break;
 	case 2:
-		__asm__ __volatile__("xchgw %w0,%1"
-			:"=r" (x)
-			:"m" (*__xg(ptr)), "0" (x)
-			:"memory");
+		asm volatile("xchgw %w0,%1"
+			     : "=r" (x)
+			     : "m" (*__xg(ptr)), "0" (x)
+			     : "memory");
 		break;
 	case 4:
-		__asm__ __volatile__("xchgl %0,%1"
-			:"=r" (x)
-			:"m" (*__xg(ptr)), "0" (x)
-			:"memory");
+		asm volatile("xchgl %0,%1"
+			     : "=r" (x)
+			     : "m" (*__xg(ptr)), "0" (x)
+			     : "memory");
 		break;
 	}
 	return x;
 }
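The cmpxchg8b loop reformatted above is what gives a 32-bit CPU an atomic 64-bit store: it reads the current value into edx:eax, then retries a locked 8-byte compare-and-swap until no other CPU has changed *ptr in between. Roughly equivalent C, as a sketch built on __cmpxchg64() from later in this header (set_64bit_sketch is an illustrative name, not a drop-in replacement):

	/* Illustrative only: C rendering of the retry loop in __set_64bit. */
	static inline void set_64bit_sketch(unsigned long long *ptr,
					    unsigned long long value)
	{
		unsigned long long cur;

		do {
			cur = *ptr;	/* movl (%0),%%eax; movl 4(%0),%%edx */
		} while (__cmpxchg64(ptr, cur, value) != cur); /* lock cmpxchg8b */
	}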
@@ -105,24 +110,27 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 
 #ifdef CONFIG_X86_CMPXCHG
 #define __HAVE_ARCH_CMPXCHG 1
-#define cmpxchg(ptr, o, n)						\
-	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
-					(unsigned long)(n), sizeof(*(ptr))))
-#define sync_cmpxchg(ptr, o, n)						\
-	((__typeof__(*(ptr)))__sync_cmpxchg((ptr), (unsigned long)(o),	\
-					(unsigned long)(n), sizeof(*(ptr))))
-#define cmpxchg_local(ptr, o, n)					\
-	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
-					(unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg(ptr, o, n)						\
+	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
+				       (unsigned long)(n),		\
+				       sizeof(*(ptr))))
+#define sync_cmpxchg(ptr, o, n)						\
+	((__typeof__(*(ptr)))__sync_cmpxchg((ptr), (unsigned long)(o),	\
+					    (unsigned long)(n),		\
+					    sizeof(*(ptr))))
+#define cmpxchg_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
+					     (unsigned long)(n),	\
+					     sizeof(*(ptr))))
 #endif
 
 #ifdef CONFIG_X86_CMPXCHG64
-#define cmpxchg64(ptr, o, n) \
-	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o),\
-					(unsigned long long)(n)))
-#define cmpxchg64_local(ptr, o, n) \
-	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o),\
-					(unsigned long long)(n)))
+#define cmpxchg64(ptr, o, n)						\
+	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
+					 (unsigned long long)(n)))
+#define cmpxchg64_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o), \
+					       (unsigned long long)(n)))
 #endif
 
 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
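The cmpxchg() family reindented above is normally used in a read-modify-write retry loop: read the old value, compute a new one, and retry if another CPU raced in between. A sketch under that pattern (atomic_max is an illustrative helper, not from this file):

	/* Illustrative only: lock-free "store maximum" on top of cmpxchg(). */
	static inline void atomic_max(unsigned long *ptr, unsigned long val)
	{
		unsigned long old;

		do {
			old = *ptr;
			if (old >= val)
				return;	/* current value already larger */
		} while (cmpxchg(ptr, old, val) != old);
	}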
@@ -131,22 +139,22 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 	unsigned long prev;
 	switch (size) {
 	case 1:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
-				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
+			     : "=a"(prev)
+			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 2:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 4:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile(LOCK_PREFIX "cmpxchgl %1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	}
 	return old;
@@ -158,85 +166,88 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
  * isn't.
  */
 static inline unsigned long __sync_cmpxchg(volatile void *ptr,
-					    unsigned long old,
-					    unsigned long new, int size)
+					   unsigned long old,
+					   unsigned long new, int size)
 {
 	unsigned long prev;
 	switch (size) {
 	case 1:
-		__asm__ __volatile__("lock; cmpxchgb %b1,%2"
-				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("lock; cmpxchgb %b1,%2"
+			     : "=a"(prev)
+			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 2:
-		__asm__ __volatile__("lock; cmpxchgw %w1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("lock; cmpxchgw %w1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 4:
-		__asm__ __volatile__("lock; cmpxchgl %1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("lock; cmpxchgl %1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	}
 	return old;
 }
 
 static inline unsigned long __cmpxchg_local(volatile void *ptr,
-			unsigned long old, unsigned long new, int size)
+					    unsigned long old,
+					    unsigned long new, int size)
 {
 	unsigned long prev;
 	switch (size) {
 	case 1:
-		__asm__ __volatile__("cmpxchgb %b1,%2"
-				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("cmpxchgb %b1,%2"
+			     : "=a"(prev)
+			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 2:
-		__asm__ __volatile__("cmpxchgw %w1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("cmpxchgw %w1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 4:
-		__asm__ __volatile__("cmpxchgl %1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("cmpxchgl %1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	}
 	return old;
 }
 
 static inline unsigned long long __cmpxchg64(volatile void *ptr,
-			unsigned long long old, unsigned long long new)
+					     unsigned long long old,
+					     unsigned long long new)
 {
 	unsigned long long prev;
-	__asm__ __volatile__(LOCK_PREFIX "cmpxchg8b %3"
-			     : "=A"(prev)
-			     : "b"((unsigned long)new),
-			       "c"((unsigned long)(new >> 32)),
-			       "m"(*__xg(ptr)),
-			       "0"(old)
-			     : "memory");
+	asm volatile(LOCK_PREFIX "cmpxchg8b %3"
+		     : "=A"(prev)
+		     : "b"((unsigned long)new),
+		       "c"((unsigned long)(new >> 32)),
+		       "m"(*__xg(ptr)),
+		       "0"(old)
+		     : "memory");
 	return prev;
 }
 
 static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
-			unsigned long long old, unsigned long long new)
+						   unsigned long long old,
+						   unsigned long long new)
 {
 	unsigned long long prev;
-	__asm__ __volatile__("cmpxchg8b %3"
-			     : "=A"(prev)
-			     : "b"((unsigned long)new),
-			       "c"((unsigned long)(new >> 32)),
-			       "m"(*__xg(ptr)),
-			       "0"(old)
-			     : "memory");
+	asm volatile("cmpxchg8b %3"
+		     : "=A"(prev)
+		     : "b"((unsigned long)new),
+		       "c"((unsigned long)(new >> 32)),
+		       "m"(*__xg(ptr)),
+		       "0"(old)
+		     : "memory");
 	return prev;
 }
 
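The 64-bit variants above follow the same retry pattern, letting a 32-bit kernel update a 64-bit word locklessly when CONFIG_X86_CMPXCHG64 is set. A sketch with an illustrative name (seq_advance is not from this header):

	/* Illustrative only: bump a 64-bit counter without a lock,
	 * backed by the cmpxchg8b-based cmpxchg64(). */
	static inline unsigned long long seq_advance(unsigned long long *seq)
	{
		unsigned long long old;

		do {
			old = *seq;
		} while (cmpxchg64(seq, old, old + 1) != old);

		return old + 1;
	}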
@@ -252,7 +263,7 @@ extern unsigned long cmpxchg_386_u16(volatile void *, u16, u16);
 extern unsigned long cmpxchg_386_u32(volatile void *, u32, u32);
 
 static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
-				      unsigned long new, int size)
+					unsigned long new, int size)
 {
 	switch (size) {
 	case 1: