Diffstat (limited to 'include/asm-x86_64/atomic.h')
-rw-r--r--  include/asm-x86_64/atomic.h | 42
1 file changed, 21 insertions, 21 deletions
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index bd3fa67ed835..007e88d6d43f 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -1,7 +1,7 @@
 #ifndef __ARCH_X86_64_ATOMIC__
 #define __ARCH_X86_64_ATOMIC__
 
-#include <asm/types.h>
+#include <asm/alternative.h>
 
 /* atomic_t should be 32 bit signed type */
 
@@ -52,7 +52,7 @@ typedef struct { volatile int counter; } atomic_t;
 static __inline__ void atomic_add(int i, atomic_t *v)
 {
         __asm__ __volatile__(
-                LOCK "addl %1,%0"
+                LOCK_PREFIX "addl %1,%0"
                 :"=m" (v->counter)
                 :"ir" (i), "m" (v->counter));
 }
@@ -67,7 +67,7 @@ static __inline__ void atomic_add(int i, atomic_t *v)
 static __inline__ void atomic_sub(int i, atomic_t *v)
 {
         __asm__ __volatile__(
-                LOCK "subl %1,%0"
+                LOCK_PREFIX "subl %1,%0"
                 :"=m" (v->counter)
                 :"ir" (i), "m" (v->counter));
 }
@@ -86,7 +86,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "subl %2,%0; sete %1"
+                LOCK_PREFIX "subl %2,%0; sete %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"ir" (i), "m" (v->counter) : "memory");
         return c;
@@ -101,7 +101,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
 static __inline__ void atomic_inc(atomic_t *v)
 {
         __asm__ __volatile__(
-                LOCK "incl %0"
+                LOCK_PREFIX "incl %0"
                 :"=m" (v->counter)
                 :"m" (v->counter));
 }
@@ -115,7 +115,7 @@ static __inline__ void atomic_inc(atomic_t *v)
 static __inline__ void atomic_dec(atomic_t *v)
 {
         __asm__ __volatile__(
-                LOCK "decl %0"
+                LOCK_PREFIX "decl %0"
                 :"=m" (v->counter)
                 :"m" (v->counter));
 }
@@ -133,7 +133,7 @@ static __inline__ int atomic_dec_and_test(atomic_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "decl %0; sete %1"
+                LOCK_PREFIX "decl %0; sete %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"m" (v->counter) : "memory");
         return c != 0;
@@ -152,7 +152,7 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "incl %0; sete %1"
+                LOCK_PREFIX "incl %0; sete %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"m" (v->counter) : "memory");
         return c != 0;
@@ -172,7 +172,7 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "addl %2,%0; sets %1"
+                LOCK_PREFIX "addl %2,%0; sets %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"ir" (i), "m" (v->counter) : "memory");
         return c;
@@ -189,7 +189,7 @@ static __inline__ int atomic_add_return(int i, atomic_t *v)
 {
         int __i = i;
         __asm__ __volatile__(
-                LOCK "xaddl %0, %1;"
+                LOCK_PREFIX "xaddl %0, %1;"
                 :"=r"(i)
                 :"m"(v->counter), "0"(i));
         return i + __i;
@@ -237,7 +237,7 @@ typedef struct { volatile long counter; } atomic64_t;
 static __inline__ void atomic64_add(long i, atomic64_t *v)
 {
         __asm__ __volatile__(
-                LOCK "addq %1,%0"
+                LOCK_PREFIX "addq %1,%0"
                 :"=m" (v->counter)
                 :"ir" (i), "m" (v->counter));
 }
@@ -252,7 +252,7 @@ static __inline__ void atomic64_add(long i, atomic64_t *v)
 static __inline__ void atomic64_sub(long i, atomic64_t *v)
 {
         __asm__ __volatile__(
-                LOCK "subq %1,%0"
+                LOCK_PREFIX "subq %1,%0"
                 :"=m" (v->counter)
                 :"ir" (i), "m" (v->counter));
 }
@@ -271,7 +271,7 @@ static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "subq %2,%0; sete %1"
+                LOCK_PREFIX "subq %2,%0; sete %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"ir" (i), "m" (v->counter) : "memory");
         return c;
@@ -286,7 +286,7 @@ static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
 static __inline__ void atomic64_inc(atomic64_t *v)
 {
         __asm__ __volatile__(
-                LOCK "incq %0"
+                LOCK_PREFIX "incq %0"
                 :"=m" (v->counter)
                 :"m" (v->counter));
 }
@@ -300,7 +300,7 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 static __inline__ void atomic64_dec(atomic64_t *v)
 {
         __asm__ __volatile__(
-                LOCK "decq %0"
+                LOCK_PREFIX "decq %0"
                 :"=m" (v->counter)
                 :"m" (v->counter));
 }
@@ -318,7 +318,7 @@ static __inline__ int atomic64_dec_and_test(atomic64_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "decq %0; sete %1"
+                LOCK_PREFIX "decq %0; sete %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"m" (v->counter) : "memory");
         return c != 0;
@@ -337,7 +337,7 @@ static __inline__ int atomic64_inc_and_test(atomic64_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "incq %0; sete %1"
+                LOCK_PREFIX "incq %0; sete %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"m" (v->counter) : "memory");
         return c != 0;
@@ -357,7 +357,7 @@ static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
         unsigned char c;
 
         __asm__ __volatile__(
-                LOCK "addq %2,%0; sets %1"
+                LOCK_PREFIX "addq %2,%0; sets %1"
                 :"=m" (v->counter), "=qm" (c)
                 :"ir" (i), "m" (v->counter) : "memory");
         return c;
@@ -374,7 +374,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 {
         long __i = i;
         __asm__ __volatile__(
-                LOCK "xaddq %0, %1;"
+                LOCK_PREFIX "xaddq %0, %1;"
                 :"=r"(i)
                 :"m"(v->counter), "0"(i));
         return i + __i;
@@ -418,11 +418,11 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
-__asm__ __volatile__(LOCK "andl %0,%1" \
+__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
 : : "r" (~(mask)),"m" (*addr) : "memory")
 
 #define atomic_set_mask(mask, addr) \
-__asm__ __volatile__(LOCK "orl %0,%1" \
+__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
 : : "r" ((unsigned)mask),"m" (*(addr)) : "memory")
 
 /* Atomic operations are already serializing on x86 */
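Note on the change: the include swap and the LOCK to LOCK_PREFIX renames travel together, because <asm/alternative.h> is the header that supplies LOCK_PREFIX. The sketch below shows roughly how that header defined the macro in this era; it is simplified from the SMP-alternatives work of the 2.6.17/2.6.18 kernels, not the verbatim source.

/*
 * Sketch (simplified) of the era's LOCK_PREFIX from <asm/alternative.h>.
 * On CONFIG_SMP kernels the "lock" prefix is emitted and its address is
 * recorded in the .smp_locks section, so the SMP-alternatives boot code
 * can patch the prefix out when the kernel finds only one CPU.  On UP
 * builds the macro is empty and no prefix is ever emitted.
 */
#ifdef CONFIG_SMP
#define LOCK_PREFIX \
                ".section .smp_locks,\"a\"\n" \
                "  .align 8\n" \
                "  .quad 661f\n" /* address of the lock prefix below */ \
                ".previous\n" \
                "661:\n\tlock; "
#else
#define LOCK_PREFIX ""
#endif

With this in place, atomic_inc() and friends still compile to "lock incl" on SMP kernels, but the old bare LOCK macro's compile-time either/or choice becomes patchable at boot: an SMP kernel running on a single processor can overwrite the recorded lock bytes with NOPs and skip the bus-lock cost.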