author     Linus Torvalds <torvalds@linux-foundation.org>  2010-05-18 11:40:05 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>  2010-05-18 11:40:05 -0400
commit     93c9d7f60c0cb7715890b1f9e159da6f4d1f5a65 (patch)
tree       6be428ca5fe52f14ebb78a8e695cec59d2f21c26 /arch/x86/include/asm
parent     7421a10de7a525f67cc082fca7a91011d00eada4 (diff)
parent     d9c5841e22231e4e49fd0a1004164e6fce59b7a6 (diff)
Merge branch 'x86-atomic-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip
* 'x86-atomic-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip:
x86: Fix LOCK_PREFIX_HERE for uniprocessor build
x86, atomic64: In selftest, distinguish x86-64 from 586+
x86-32: Fix atomic64_inc_not_zero return value convention
lib: Fix atomic64_inc_not_zero test
lib: Fix atomic64_add_unless return value convention
x86-32: Fix atomic64_add_unless return value convention
lib: Fix atomic64_add_unless test
x86: Implement atomic[64]_dec_if_positive()
lib: Only test atomic64_dec_if_positive on archs having it
x86-32: Rewrite 32-bit atomic64 functions in assembly
lib: Add self-test for atomic64_t
x86-32: Allow UP/SMP lock replacement in cmpxchg64
x86: Add support for lock prefix in alternatives
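
Several entries in the shortlog above ("Fix atomic64_add_unless return value convention" and its lib/ and x86-32 counterparts) enforce the same contract: atomic64_add_unless() must return non-zero if and only if the add was performed. A minimal user-space sketch of that contract using C11 atomics (an illustrative analogue, not the kernel's asm implementation):

#include <stdatomic.h>

/* Add @a to *v unless *v == u; return non-zero iff the add happened. */
static int add64_unless(_Atomic long long *v, long long a, long long u)
{
	long long c = atomic_load(v);

	while (c != u) {
		if (atomic_compare_exchange_weak(v, &c, c + a))
			return 1;	/* added: *v was not u */
		/* CAS failure reloaded c; recheck it against u */
	}
	return 0;			/* *v was u: nothing was added */
}

int main(void)
{
	_Atomic long long v = 5;
	/* u equals the current value, so nothing is added and 0 is returned */
	return (add64_unless(&v, 1, 5) == 0 && v == 5) ? 0 : 1;
}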
Diffstat (limited to 'arch/x86/include/asm')
 -rw-r--r--  arch/x86/include/asm/alternative.h |   9
 -rw-r--r--  arch/x86/include/asm/atomic.h      |  23
 -rw-r--r--  arch/x86/include/asm/atomic64_32.h | 278
 -rw-r--r--  arch/x86/include/asm/atomic64_64.h |  23
 -rw-r--r--  arch/x86/include/asm/cmpxchg_32.h  |   3
 5 files changed, 265 insertions(+), 71 deletions(-)
diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 714bf2417284..92a9033c14d1 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -28,14 +28,17 @@
  */
 
 #ifdef CONFIG_SMP
-#define LOCK_PREFIX \
+#define LOCK_PREFIX_HERE \
 		".section .smp_locks,\"a\"\n"	\
 		".balign 4\n"			\
-		".long 661f - .\n" /* offset */	\
+		".long 671f - .\n" /* offset */	\
 		".previous\n"			\
-		"661:\n\tlock; "
+		"671:"
+
+#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "
 
 #else /* ! CONFIG_SMP */
+#define LOCK_PREFIX_HERE ""
 #define LOCK_PREFIX ""
 #endif
 
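
The hunk above splits the old LOCK_PREFIX into LOCK_PREFIX_HERE, which only records the address of the next instruction in the .smp_locks section, plus the "lock" prefix itself. That lets code which needs the bookkeeping without an immediately following lock instruction (such as the cmpxchg64 alternative at the end of this patch) use LOCK_PREFIX_HERE on its own. A user-space sketch of the expansion, assuming x86 and GCC/Clang inline asm; the macro bodies are copied from the hunk, and atomic_inc_demo() is a hypothetical caller:

#define LOCK_PREFIX_HERE			\
	".section .smp_locks,\"a\"\n"		\
	".balign 4\n"				\
	".long 671f - .\n" /* PC-relative offset of the 671: label */ \
	".previous\n"				\
	"671:"

#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "

/* Hypothetical user: the lock byte's address lands in .smp_locks,
 * so a UP kernel could later patch it out. */
static inline void atomic_inc_demo(int *v)
{
	asm volatile(LOCK_PREFIX "incl %0" : "+m" (*v));
}

int main(void)
{
	int x = 0;
	atomic_inc_demo(&x);
	return x == 1 ? 0 : 1;
}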
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 37b39d27abe0..952a826ac4e5 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -246,6 +246,29 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/*
+ * atomic_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic_t
+ *
+ * The function returns the old value of *v minus 1, even if
+ * the atomic variable, v, was not decremented.
+ */
+static inline int atomic_dec_if_positive(atomic_t *v)
+{
+	int c, old, dec;
+	c = atomic_read(v);
+	for (;;) {
+		dec = c - 1;
+		if (unlikely(dec < 0))
+			break;
+		old = atomic_cmpxchg((v), c, dec);
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return dec;
+}
+
 /**
  * atomic_inc_short - increment of a short integer
  * @v: pointer to type int
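
The new atomic_dec_if_positive() decrements only when the result stays non-negative, and deliberately returns the old value minus 1 even when it does not decrement, so a caller can test the sign of the return value in one step. A user-space analogue with C11 atomics (hypothetical names; the kernel version above uses atomic_cmpxchg() rather than compare_exchange):

#include <stdatomic.h>

static int dec_if_positive(atomic_int *v)
{
	int c = atomic_load(v);
	int dec;

	for (;;) {
		dec = c - 1;
		if (dec < 0)
			break;		/* would go negative: leave *v alone */
		if (atomic_compare_exchange_weak(v, &c, dec))
			break;		/* decremented */
		/* CAS failure reloaded c; retry */
	}
	return dec;	/* old value minus 1, even if nothing was written */
}

int main(void)
{
	atomic_int slots = 1;
	int got1 = dec_if_positive(&slots) >= 0;	/* takes the last slot */
	int got2 = dec_if_positive(&slots) >= 0;	/* fails: none left */
	return (got1 && !got2) ? 0 : 1;
}

The >= 0 test is the intended idiom: a non-negative return means the decrement happened.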
diff --git a/arch/x86/include/asm/atomic64_32.h b/arch/x86/include/asm/atomic64_32.h
index 03027bf28de5..2a934aa19a43 100644
--- a/arch/x86/include/asm/atomic64_32.h
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -14,109 +14,193 @@ typedef struct {
 
 #define ATOMIC64_INIT(val)	{ (val) }
 
-extern u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val);
+#ifdef CONFIG_X86_CMPXCHG64
+#define ATOMIC64_ALTERNATIVE_(f, g) "call atomic64_" #g "_cx8"
+#else
+#define ATOMIC64_ALTERNATIVE_(f, g) ALTERNATIVE("call atomic64_" #f "_386", "call atomic64_" #g "_cx8", X86_FEATURE_CX8)
+#endif
+
+#define ATOMIC64_ALTERNATIVE(f) ATOMIC64_ALTERNATIVE_(f, f)
+
+/**
+ * atomic64_cmpxchg - cmpxchg atomic64 variable
+ * @p: pointer to type atomic64_t
+ * @o: expected value
+ * @n: new value
+ *
+ * Atomically sets @v to @n if it was equal to @o and returns
+ * the old value.
+ */
+
+static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
+{
+	return cmpxchg64(&v->counter, o, n);
+}
 
 /**
  * atomic64_xchg - xchg atomic64 variable
- * @ptr:      pointer to type atomic64_t
- * @new_val:  value to assign
+ * @v: pointer to type atomic64_t
+ * @n: value to assign
  *
- * Atomically xchgs the value of @ptr to @new_val and returns
+ * Atomically xchgs the value of @v to @n and returns
  * the old value.
  */
-extern u64 atomic64_xchg(atomic64_t *ptr, u64 new_val);
+static inline long long atomic64_xchg(atomic64_t *v, long long n)
+{
+	long long o;
+	unsigned high = (unsigned)(n >> 32);
+	unsigned low = (unsigned)n;
+	asm volatile(ATOMIC64_ALTERNATIVE(xchg)
+		     : "=A" (o), "+b" (low), "+c" (high)
+		     : "S" (v)
+		     : "memory"
+		     );
+	return o;
+}
 
 /**
  * atomic64_set - set atomic64 variable
- * @ptr:      pointer to type atomic64_t
- * @new_val:  value to assign
+ * @v: pointer to type atomic64_t
+ * @n: value to assign
  *
- * Atomically sets the value of @ptr to @new_val.
+ * Atomically sets the value of @v to @n.
  */
-extern void atomic64_set(atomic64_t *ptr, u64 new_val);
+static inline void atomic64_set(atomic64_t *v, long long i)
+{
+	unsigned high = (unsigned)(i >> 32);
+	unsigned low = (unsigned)i;
+	asm volatile(ATOMIC64_ALTERNATIVE(set)
+		     : "+b" (low), "+c" (high)
+		     : "S" (v)
+		     : "eax", "edx", "memory"
+		     );
+}
 
 /**
  * atomic64_read - read atomic64 variable
- * @ptr:      pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically reads the value of @ptr and returns it.
+ * Atomically reads the value of @v and returns it.
  */
-static inline u64 atomic64_read(atomic64_t *ptr)
+static inline long long atomic64_read(atomic64_t *v)
 {
-	u64 res;
-
-	/*
-	 * Note, we inline this atomic64_t primitive because
-	 * it only clobbers EAX/EDX and leaves the others
-	 * untouched. We also (somewhat subtly) rely on the
-	 * fact that cmpxchg8b returns the current 64-bit value
-	 * of the memory location we are touching:
-	 */
-	asm volatile(
-		"mov %%ebx, %%eax\n\t"
-		"mov %%ecx, %%edx\n\t"
-		LOCK_PREFIX "cmpxchg8b %1\n"
-			: "=&A" (res)
-			: "m" (*ptr)
-		);
-
-	return res;
-}
-
-extern u64 atomic64_read(atomic64_t *ptr);
+	long long r;
+	asm volatile(ATOMIC64_ALTERNATIVE(read)
+		     : "=A" (r), "+c" (v)
+		     : : "memory"
+		     );
+	return r;
+}
 
 /**
  * atomic64_add_return - add and return
- * @delta: integer value to add
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
- * Atomically adds @delta to @ptr and returns @delta + *@ptr
+ * Atomically adds @i to @v and returns @i + *@v
  */
-extern u64 atomic64_add_return(u64 delta, atomic64_t *ptr);
+static inline long long atomic64_add_return(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE(add_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
 
 /*
  * Other variants with different arithmetic operators:
  */
-extern u64 atomic64_sub_return(u64 delta, atomic64_t *ptr);
-extern u64 atomic64_inc_return(atomic64_t *ptr);
-extern u64 atomic64_dec_return(atomic64_t *ptr);
+static inline long long atomic64_sub_return(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE(sub_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
+
+static inline long long atomic64_inc_return(atomic64_t *v)
+{
+	long long a;
+	asm volatile(ATOMIC64_ALTERNATIVE(inc_return)
+		     : "=A" (a)
+		     : "S" (v)
+		     : "memory", "ecx"
+		     );
+	return a;
+}
+
+static inline long long atomic64_dec_return(atomic64_t *v)
+{
+	long long a;
+	asm volatile(ATOMIC64_ALTERNATIVE(dec_return)
+		     : "=A" (a)
+		     : "S" (v)
+		     : "memory", "ecx"
+		     );
+	return a;
+}
 
 /**
  * atomic64_add - add integer to atomic64 variable
- * @delta: integer value to add
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
- * Atomically adds @delta to @ptr.
+ * Atomically adds @i to @v.
  */
-extern void atomic64_add(u64 delta, atomic64_t *ptr);
+static inline long long atomic64_add(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(add, add_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
 
 /**
  * atomic64_sub - subtract the atomic64 variable
- * @delta: integer value to subtract
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to subtract
+ * @v: pointer to type atomic64_t
  *
- * Atomically subtracts @delta from @ptr.
+ * Atomically subtracts @i from @v.
  */
-extern void atomic64_sub(u64 delta, atomic64_t *ptr);
+static inline long long atomic64_sub(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(sub, sub_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
 
 /**
  * atomic64_sub_and_test - subtract value from variable and test result
- * @delta: integer value to subtract
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to subtract
+ * @v: pointer to type atomic64_t
  *
- * Atomically subtracts @delta from @ptr and returns
+ * Atomically subtracts @i from @v and returns
  * true if the result is zero, or false for all
  * other cases.
  */
-extern int atomic64_sub_and_test(u64 delta, atomic64_t *ptr);
+static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
+{
+	return atomic64_sub_return(i, v) == 0;
+}
 
 /**
  * atomic64_inc - increment atomic64 variable
- * @ptr: pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically increments @ptr by 1.
+ * Atomically increments @v by 1.
  */
-extern void atomic64_inc(atomic64_t *ptr);
+static inline void atomic64_inc(atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(inc, inc_return)
+		     : : "S" (v)
+		     : "memory", "eax", "ecx", "edx"
+		     );
+}
 
 /**
  * atomic64_dec - decrement atomic64 variable
@@ -124,37 +208,97 @@ extern void atomic64_inc(atomic64_t *ptr);
  *
  * Atomically decrements @ptr by 1.
  */
-extern void atomic64_dec(atomic64_t *ptr);
+static inline void atomic64_dec(atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(dec, dec_return)
+		     : : "S" (v)
+		     : "memory", "eax", "ecx", "edx"
+		     );
+}
 
 /**
  * atomic64_dec_and_test - decrement and test
- * @ptr: pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically decrements @ptr by 1 and
+ * Atomically decrements @v by 1 and
  * returns true if the result is 0, or false for all other
 * cases.
  */
-extern int atomic64_dec_and_test(atomic64_t *ptr);
+static inline int atomic64_dec_and_test(atomic64_t *v)
+{
+	return atomic64_dec_return(v) == 0;
+}
 
 /**
  * atomic64_inc_and_test - increment and test
- * @ptr: pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically increments @ptr by 1
+ * Atomically increments @v by 1
  * and returns true if the result is zero, or false for all
  * other cases.
  */
-extern int atomic64_inc_and_test(atomic64_t *ptr);
+static inline int atomic64_inc_and_test(atomic64_t *v)
+{
+	return atomic64_inc_return(v) == 0;
+}
 
 /**
  * atomic64_add_negative - add and test if negative
- * @delta: integer value to add
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
- * Atomically adds @delta to @ptr and returns true
+ * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-extern int atomic64_add_negative(u64 delta, atomic64_t *ptr);
+static inline int atomic64_add_negative(long long i, atomic64_t *v)
+{
+	return atomic64_add_return(i, v) < 0;
+}
+
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
+{
+	unsigned low = (unsigned)u;
+	unsigned high = (unsigned)(u >> 32);
+	asm volatile(ATOMIC64_ALTERNATIVE(add_unless) "\n\t"
+		     : "+A" (a), "+c" (v), "+S" (low), "+D" (high)
+		     : : "memory");
+	return (int)a;
+}
+
+
+static inline int atomic64_inc_not_zero(atomic64_t *v)
+{
+	int r;
+	asm volatile(ATOMIC64_ALTERNATIVE(inc_not_zero)
+		     : "=a" (r)
+		     : "S" (v)
+		     : "ecx", "edx", "memory"
+		     );
+	return r;
+}
+
+static inline long long atomic64_dec_if_positive(atomic64_t *v)
+{
+	long long r;
+	asm volatile(ATOMIC64_ALTERNATIVE(dec_if_positive)
+		     : "=A" (r)
+		     : "S" (v)
+		     : "ecx", "memory"
+		     );
+	return r;
+}
+
+#undef ATOMIC64_ALTERNATIVE
+#undef ATOMIC64_ALTERNATIVE_
 
 #endif /* _ASM_X86_ATOMIC64_32_H */
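
The atomic64_*_386 and atomic64_*_cx8 routines invoked through ATOMIC64_ALTERNATIVE() are out-of-line assembly helpers with a bespoke register ABI rather than the normal C calling convention: the constraints above show the atomic64_t pointer typically travelling in %esi ("S") or %ecx ("c"), 64-bit values in %edx:%eax (the "A" constraint) or %ecx:%ebx, and each wrapper's clobber list naming exactly the registers its helper may scribble on. On a 586+ CPU the cx8 variants come down to a cmpxchg8b retry loop; a hedged user-space sketch of that loop, using GCC's __sync builtin as a stand-in for the hand-written assembly (the _sketch name is hypothetical):

#include <stdint.h>

static int64_t add64_return_sketch(int64_t i, int64_t *v)
{
	int64_t old, sum;

	do {
		/* A torn (two 32-bit halves) read is harmless here: if
		 * another CPU changed *v in between, the CAS below fails
		 * and we simply retry. */
		old = *(volatile int64_t *)v;
		sum = old + i;
	} while (!__sync_bool_compare_and_swap(v, old, sum)); /* lock cmpxchg8b on i586+ */
	return sum;
}

int main(void)
{
	int64_t v = 1;
	return add64_return_sketch(2, &v) == 3 ? 0 : 1;
}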
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index b014e235ea8d..49fd1ea22951 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -221,4 +221,27 @@ static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
+/*
+ * atomic64_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic_t
+ *
+ * The function returns the old value of *v minus 1, even if
+ * the atomic variable, v, was not decremented.
+ */
+static inline long atomic64_dec_if_positive(atomic64_t *v)
+{
+	long c, old, dec;
+	c = atomic64_read(v);
+	for (;;) {
+		dec = c - 1;
+		if (unlikely(dec < 0))
+			break;
+		old = atomic64_cmpxchg((v), c, dec);
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return dec;
+}
+
 #endif /* _ASM_X86_ATOMIC64_64_H */
diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index ffb9bb6b6c37..8859e12dd3cf 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -271,7 +271,8 @@ extern unsigned long long cmpxchg_486_u64(volatile void *, u64, u64);
 	__typeof__(*(ptr)) __ret;				\
 	__typeof__(*(ptr)) __old = (o);				\
 	__typeof__(*(ptr)) __new = (n);				\
-	alternative_io("call cmpxchg8b_emu",			\
+	alternative_io(LOCK_PREFIX_HERE				\
+		       "call cmpxchg8b_emu",			\
 		       "lock; cmpxchg8b (%%esi)" ,		\
 		       X86_FEATURE_CX8,				\
 		       "=A" (__ret),				\
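
Putting LOCK_PREFIX_HERE in front of "call cmpxchg8b_emu" records this site in .smp_locks even though the call itself carries no lock prefix. Once the alternative is patched to "lock; cmpxchg8b (%%esi)" on CX8-capable hardware, a uniprocessor kernel can still locate and neutralize that lock byte. A simplified sketch of the UP-side pass, modeled on alternatives_smp_unlock() in arch/x86/kernel/alternative.c (names and the exact replacement byte are approximations):

#include <stdint.h>

/* Each .smp_locks entry is a 32-bit PC-relative offset, as emitted by
 * the ".long 671f - ." in LOCK_PREFIX_HERE. */
static void smp_unlock_sketch(int32_t *start, int32_t *end)
{
	int32_t *poff;

	for (poff = start; poff < end; poff++) {
		uint8_t *ptr = (uint8_t *)poff + *poff;

		/* Only patch an actual lock prefix; if the alternative was
		 * never applied, the recorded byte is still the call opcode. */
		if (*ptr == 0xf0)
			*ptr = 0x3e;	/* DS override: a one-byte no-op here */
	}
}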