Diffstat (limited to 'include/asm-x86/atomic_64.h')
-rw-r--r--  include/asm-x86/atomic_64.h | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/include/asm-x86/atomic_64.h b/include/asm-x86/atomic_64.h
index a0095191c02e..2cb218c4a356 100644
--- a/include/asm-x86/atomic_64.h
+++ b/include/asm-x86/atomic_64.h
@@ -1,5 +1,5 @@
-#ifndef __ARCH_X86_64_ATOMIC__
-#define __ARCH_X86_64_ATOMIC__
+#ifndef ASM_X86__ATOMIC_64_H
+#define ASM_X86__ATOMIC_64_H
 
 #include <asm/alternative.h>
 #include <asm/cmpxchg.h>
@@ -228,7 +228,7 @@ static inline void atomic64_add(long i, atomic64_t *v)
 {
 	asm volatile(LOCK_PREFIX "addq %1,%0"
 		     : "=m" (v->counter)
-		     : "ir" (i), "m" (v->counter));
+		     : "er" (i), "m" (v->counter));
 }
 
 /**
@@ -242,7 +242,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
 {
 	asm volatile(LOCK_PREFIX "subq %1,%0"
 		     : "=m" (v->counter)
-		     : "ir" (i), "m" (v->counter));
+		     : "er" (i), "m" (v->counter));
 }
 
 /**
@@ -260,7 +260,7 @@ static inline int atomic64_sub_and_test(long i, atomic64_t *v)
 
 	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
 		     : "=m" (v->counter), "=qm" (c)
-		     : "ir" (i), "m" (v->counter) : "memory");
+		     : "er" (i), "m" (v->counter) : "memory");
 	return c;
 }
 
@@ -341,7 +341,7 @@ static inline int atomic64_add_negative(long i, atomic64_t *v)
 
 	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
 		     : "=m" (v->counter), "=qm" (c)
-		     : "ir" (i), "m" (v->counter) : "memory");
+		     : "er" (i), "m" (v->counter) : "memory");
 	return c;
 }
 
@@ -470,4 +470,4 @@ static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
 #define smp_mb__after_atomic_inc()	barrier()
 
 #include <asm-generic/atomic.h>
-#endif
+#endif /* ASM_X86__ATOMIC_64_H */
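
Note on the constraint change: besides the header-guard rename, every hunk above switches the inline-asm input constraint from "ir" to "er". GCC's "i" constraint accepts any integer immediate, including 64-bit constants that the REX.W-encoded addq/subq instructions cannot encode as an immediate; "e" accepts only 32-bit sign-extended constants, and the "r" alternative lets anything larger go through a register instead. The sketch below is a minimal standalone illustration of the same constraint, not code from the patch; the helper name example_atomic_add64 and the literal "lock; " prefix (standing in for the kernel's LOCK_PREFIX macro) are assumptions for the example.

/*
 * Illustrative sketch only: "e" limits the immediate operand to a 32-bit
 * sign-extended constant, which is all addq can encode; values that do not
 * fit match the "r" alternative and are passed in a register.
 */
static inline void example_atomic_add64(long i, long *counter)
{
	asm volatile("lock; addq %1,%0"
		     : "=m" (*counter)
		     : "er" (i), "m" (*counter));
}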