diff options
-rw-r--r-- | arch/x86/include/asm/rwsem.h | 30 |
1 file changed, 15 insertions, 15 deletions
diff --git a/arch/x86/include/asm/rwsem.h b/arch/x86/include/asm/rwsem.h index ca7517d33776..413620024768 100644 --- a/arch/x86/include/asm/rwsem.h +++ b/arch/x86/include/asm/rwsem.h | |||
@@ -105,7 +105,7 @@ do { \ | |||
105 | static inline void __down_read(struct rw_semaphore *sem) | 105 | static inline void __down_read(struct rw_semaphore *sem) |
106 | { | 106 | { |
107 | asm volatile("# beginning down_read\n\t" | 107 | asm volatile("# beginning down_read\n\t" |
108 | LOCK_PREFIX " incl (%%eax)\n\t" | 108 | LOCK_PREFIX " inc%z0 (%1)\n\t" |
109 | /* adds 0x00000001, returns the old value */ | 109 | /* adds 0x00000001, returns the old value */ |
110 | " jns 1f\n" | 110 | " jns 1f\n" |
111 | " call call_rwsem_down_read_failed\n" | 111 | " call call_rwsem_down_read_failed\n" |
@@ -123,12 +123,12 @@ static inline int __down_read_trylock(struct rw_semaphore *sem) | |||
123 | { | 123 | { |
124 | __s32 result, tmp; | 124 | __s32 result, tmp; |
125 | asm volatile("# beginning __down_read_trylock\n\t" | 125 | asm volatile("# beginning __down_read_trylock\n\t" |
126 | " movl %0,%1\n\t" | 126 | " mov %0,%1\n\t" |
127 | "1:\n\t" | 127 | "1:\n\t" |
128 | " movl %1,%2\n\t" | 128 | " mov %1,%2\n\t" |
129 | " addl %3,%2\n\t" | 129 | " add %3,%2\n\t" |
130 | " jle 2f\n\t" | 130 | " jle 2f\n\t" |
131 | LOCK_PREFIX " cmpxchgl %2,%0\n\t" | 131 | LOCK_PREFIX " cmpxchg %2,%0\n\t" |
132 | " jnz 1b\n\t" | 132 | " jnz 1b\n\t" |
133 | "2:\n\t" | 133 | "2:\n\t" |
134 | "# ending __down_read_trylock\n\t" | 134 | "# ending __down_read_trylock\n\t" |
@@ -147,9 +147,9 @@ static inline void __down_write_nested(struct rw_semaphore *sem, int subclass) | |||
147 | 147 | ||
148 | tmp = RWSEM_ACTIVE_WRITE_BIAS; | 148 | tmp = RWSEM_ACTIVE_WRITE_BIAS; |
149 | asm volatile("# beginning down_write\n\t" | 149 | asm volatile("# beginning down_write\n\t" |
150 | LOCK_PREFIX " xadd %%edx,(%%eax)\n\t" | 150 | LOCK_PREFIX " xadd %1,(%2)\n\t" |
151 | /* subtract 0x0000ffff, returns the old value */ | 151 | /* subtract 0x0000ffff, returns the old value */ |
152 | " testl %%edx,%%edx\n\t" | 152 | " test %1,%1\n\t" |
153 | /* was the count 0 before? */ | 153 | /* was the count 0 before? */ |
154 | " jz 1f\n" | 154 | " jz 1f\n" |
155 | " call call_rwsem_down_write_failed\n" | 155 | " call call_rwsem_down_write_failed\n" |
@@ -185,7 +185,7 @@ static inline void __up_read(struct rw_semaphore *sem) | |||
185 | { | 185 | { |
186 | __s32 tmp = -RWSEM_ACTIVE_READ_BIAS; | 186 | __s32 tmp = -RWSEM_ACTIVE_READ_BIAS; |
187 | asm volatile("# beginning __up_read\n\t" | 187 | asm volatile("# beginning __up_read\n\t" |
188 | LOCK_PREFIX " xadd %%edx,(%%eax)\n\t" | 188 | LOCK_PREFIX " xadd %1,(%2)\n\t" |
189 | /* subtracts 1, returns the old value */ | 189 | /* subtracts 1, returns the old value */ |
190 | " jns 1f\n\t" | 190 | " jns 1f\n\t" |
191 | " call call_rwsem_wake\n" | 191 | " call call_rwsem_wake\n" |
@@ -201,18 +201,18 @@ static inline void __up_read(struct rw_semaphore *sem) | |||
201 | */ | 201 | */ |
202 | static inline void __up_write(struct rw_semaphore *sem) | 202 | static inline void __up_write(struct rw_semaphore *sem) |
203 | { | 203 | { |
204 | unsigned long tmp; | ||
204 | asm volatile("# beginning __up_write\n\t" | 205 | asm volatile("# beginning __up_write\n\t" |
205 | " movl %2,%%edx\n\t" | 206 | LOCK_PREFIX " xadd %1,(%2)\n\t" |
206 | LOCK_PREFIX " xaddl %%edx,(%%eax)\n\t" | ||
207 | /* tries to transition | 207 | /* tries to transition |
208 | 0xffff0001 -> 0x00000000 */ | 208 | 0xffff0001 -> 0x00000000 */ |
209 | " jz 1f\n" | 209 | " jz 1f\n" |
210 | " call call_rwsem_wake\n" | 210 | " call call_rwsem_wake\n" |
211 | "1:\n\t" | 211 | "1:\n\t" |
212 | "# ending __up_write\n" | 212 | "# ending __up_write\n" |
213 | : "+m" (sem->count) | 213 | : "+m" (sem->count), "=d" (tmp) |
214 | : "a" (sem), "i" (-RWSEM_ACTIVE_WRITE_BIAS) | 214 | : "a" (sem), "1" (-RWSEM_ACTIVE_WRITE_BIAS) |
215 | : "memory", "cc", "edx"); | 215 | : "memory", "cc"); |
216 | } | 216 | } |
217 | 217 | ||
218 | /* | 218 | /* |
@@ -221,7 +221,7 @@ static inline void __up_write(struct rw_semaphore *sem) | |||
221 | static inline void __downgrade_write(struct rw_semaphore *sem) | 221 | static inline void __downgrade_write(struct rw_semaphore *sem) |
222 | { | 222 | { |
223 | asm volatile("# beginning __downgrade_write\n\t" | 223 | asm volatile("# beginning __downgrade_write\n\t" |
224 | LOCK_PREFIX " addl %2,(%%eax)\n\t" | 224 | LOCK_PREFIX " add%z0 %2,(%1)\n\t" |
225 | /* transitions 0xZZZZ0001 -> 0xYYYY0001 */ | 225 | /* transitions 0xZZZZ0001 -> 0xYYYY0001 */ |
226 | " jns 1f\n\t" | 226 | " jns 1f\n\t" |
227 | " call call_rwsem_downgrade_wake\n" | 227 | " call call_rwsem_downgrade_wake\n" |
@@ -237,7 +237,7 @@ static inline void __downgrade_write(struct rw_semaphore *sem) | |||
237 | */ | 237 | */ |
238 | static inline void rwsem_atomic_add(int delta, struct rw_semaphore *sem) | 238 | static inline void rwsem_atomic_add(int delta, struct rw_semaphore *sem) |
239 | { | 239 | { |
240 | asm volatile(LOCK_PREFIX "addl %1,%0" | 240 | asm volatile(LOCK_PREFIX "add%z0 %1,%0" |
241 | : "+m" (sem->count) | 241 | : "+m" (sem->count) |
242 | : "ir" (delta)); | 242 | : "ir" (delta)); |
243 | } | 243 | } |