Diffstat (limited to 'include')
-rw-r--r--	include/asm-x86/spinlock_32.h	 2
-rw-r--r--	include/asm-x86/spinlock_64.h	10
2 files changed, 6 insertions, 6 deletions
diff --git a/include/asm-x86/spinlock_32.h b/include/asm-x86/spinlock_32.h
index c42c3f12d7ce..fca124a1103d 100644
--- a/include/asm-x86/spinlock_32.h
+++ b/include/asm-x86/spinlock_32.h
@@ -99,7 +99,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	asm volatile("movb $1,%0" : "+m" (lock->slock) :: "memory");
+	asm volatile("movb $1,%0" : "=m" (lock->slock) :: "memory");
 }
 
 #else
diff --git a/include/asm-x86/spinlock_64.h b/include/asm-x86/spinlock_64.h
index 3b5adf92ad08..e81f6c18d877 100644
--- a/include/asm-x86/spinlock_64.h
+++ b/include/asm-x86/spinlock_64.h
@@ -34,7 +34,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 		"jle 3b\n\t"
 		"jmp 1b\n"
 		"2:\t"
-		: "=m" (lock->slock) : : "memory");
+		: "+m" (lock->slock) : : "memory");
 }
 
 /*
@@ -80,7 +80,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
 	asm volatile(
 		"xchgl %0,%1"
-		:"=q" (oldval), "=m" (lock->slock)
+		:"=q" (oldval), "+m" (lock->slock)
 		:"0" (0) : "memory");
 
 	return oldval > 0;
@@ -162,13 +162,13 @@ static inline int __raw_write_trylock(raw_rwlock_t *lock)
 
 static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "incl %0" :"=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "incl %0" :"+m" (rw->lock) : : "memory");
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ",%0"
-		: "=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
+		: "+m" (rw->lock) : : "memory");
 }
 
 #define _raw_spin_relax(lock)	cpu_relax()
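
Editorial note, not part of the commit: in GCC inline assembly, "=m" marks a memory operand as write-only, while "+m" marks it as read-write. The 32-bit unlock above only stores a constant byte, so "=m" is sufficient there; the 64-bit lock loop, xchgl, incl and addl paths also read the old value of the lock word, so they need "+m", otherwise the compiler may treat the previous contents as dead. A minimal sketch of the distinction, using hypothetical helpers on plain scalars rather than the kernel's lock types:

static inline void store_one(unsigned char *p)
{
	/* Pure store: the asm never reads *p, so a write-only "=m"
	 * output is enough (compare the 32-bit __raw_spin_unlock). */
	asm volatile("movb $1,%0" : "=m" (*p) : : "memory");
}

static inline void locked_inc(int *p)
{
	/* Read-modify-write: "incl" reads the old value of *p before
	 * writing it back, so the operand must be "+m" (compare the
	 * 64-bit __raw_read_unlock). */
	asm volatile("lock; incl %0" : "+m" (*p) : : "memory");
}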