Diffstat (limited to 'include/asm-x86')
-rw-r--r--  include/asm-x86/rwlock.h      | 1
-rw-r--r--  include/asm-x86/spinlock_32.h | 8
-rw-r--r--  include/asm-x86/spinlock_64.h | 6
3 files changed, 7 insertions, 8 deletions
diff --git a/include/asm-x86/rwlock.h b/include/asm-x86/rwlock.h
index f2b64a429e6b..6a8c0d645108 100644
--- a/include/asm-x86/rwlock.h
+++ b/include/asm-x86/rwlock.h
@@ -2,7 +2,6 @@
 #define _ASM_X86_RWLOCK_H
 
 #define RW_LOCK_BIAS		0x01000000
-#define RW_LOCK_BIAS_STR	"0x01000000"
 
 /* Actual code is in asm/spinlock.h or in arch/x86/lib/rwlock.S */
 
diff --git a/include/asm-x86/spinlock_32.h b/include/asm-x86/spinlock_32.h
index fca124a1103d..e7a14ab906e9 100644
--- a/include/asm-x86/spinlock_32.h
+++ b/include/asm-x86/spinlock_32.h
@@ -156,11 +156,11 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t"
+	asm volatile(LOCK_PREFIX " subl %1,(%0)\n\t"
 		     "jz 1f\n"
 		     "call __write_lock_failed\n\t"
 		     "1:\n"
-		     ::"a" (rw) : "memory");
+		     ::"a" (rw), "i" (RW_LOCK_BIAS) : "memory");
 }
 
 static inline int __raw_read_trylock(raw_rwlock_t *lock)
@@ -191,8 +191,8 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-		     : "+m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "addl %1, %0"
+		     : "+m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
 #define _raw_spin_relax(lock)	cpu_relax()
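
A minimal, self-contained sketch (not kernel code) of the constraint change above, using a hypothetical DEMO_BIAS in place of RW_LOCK_BIAS: with an "i" (immediate) constraint, GCC substitutes the constant into the AT&T-syntax template itself, "$" prefix included, so a separately maintained stringified macro such as RW_LOCK_BIAS_STR becomes unnecessary.

/* Sketch only; DEMO_BIAS and add_bias() are illustrative names. */
#include <stdio.h>

#define DEMO_BIAS 0x01000000

static inline int add_bias(int v)
{
	/* "%1" expands to "$0x1000000" because of the "i" constraint */
	asm volatile("addl %1, %0"
		     : "+r" (v)
		     : "i" (DEMO_BIAS));
	return v;
}

int main(void)
{
	printf("%#x\n", add_bias(0));	/* prints 0x1000000 */
	return 0;
}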
diff --git a/include/asm-x86/spinlock_64.h b/include/asm-x86/spinlock_64.h
index e81f6c18d877..ab50e7f51058 100644
--- a/include/asm-x86/spinlock_64.h
+++ b/include/asm-x86/spinlock_64.h
@@ -127,7 +127,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
127 "jns 1f\n" 127 "jns 1f\n"
128 "call __read_lock_failed\n\t" 128 "call __read_lock_failed\n\t"
129 "1:\n" 129 "1:\n"
130 ::"D" (rw), "i" (RW_LOCK_BIAS) : "memory"); 130 ::"D" (rw) : "memory");
131} 131}
132 132
133static inline void __raw_write_lock(raw_rwlock_t *rw) 133static inline void __raw_write_lock(raw_rwlock_t *rw)
@@ -167,8 +167,8 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-		     : "+m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "addl %1, %0"
+		     : "+m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
 #define _raw_spin_relax(lock)	cpu_relax()
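
Under the same assumption, a sketch of the memory-operand form used by __raw_write_unlock() above; demo_rwlock_t and demo_write_unlock() are hypothetical names, not kernel API. The lock word is tied to "%0" through "+m" while the bias stays an "i" immediate, which is the shape the patch leaves behind. The __raw_read_lock() hunk in spinlock_64.h goes the other way: its template never references "%1", so the unused "i" (RW_LOCK_BIAS) input is simply dropped.

/* Sketch only; mirrors the unlock pattern, not the kernel implementation. */
#define DEMO_BIAS 0x01000000

typedef struct { volatile int lock; } demo_rwlock_t;

static inline void demo_write_unlock(demo_rwlock_t *rw)
{
	/* atomically add the bias back to the lock word */
	asm volatile("lock; addl %1, %0"
		     : "+m" (rw->lock)
		     : "i" (DEMO_BIAS)
		     : "memory");
}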