Diffstat (limited to 'arch/mips/include/asm/spinlock.h')
-rw-r--r--  arch/mips/include/asm/spinlock.h  |  55
1 file changed, 26 insertions(+), 29 deletions(-)
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index c6d06d383ef9..b4548690ade9 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -89,7 +89,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 		"	subu	%[ticket], %[ticket], 1		\n"
 		"	.previous				\n"
 		"	.set	pop				\n"
-		: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+		: [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
 		  [serving_now_ptr] "+m" (lock->h.serving_now),
 		  [ticket] "=&r" (tmp),
 		  [my_ticket] "=&r" (my_ticket)
@@ -122,7 +122,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 		"	subu	%[ticket], %[ticket], 1		\n"
 		"	.previous				\n"
 		"	.set	pop				\n"
-		: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+		: [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
 		  [serving_now_ptr] "+m" (lock->h.serving_now),
 		  [ticket] "=&r" (tmp),
 		  [my_ticket] "=&r" (my_ticket)
@@ -164,7 +164,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
 		"	li	%[ticket], 0			\n"
 		"	.previous				\n"
 		"	.set	pop				\n"
-		: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+		: [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
 		  [ticket] "=&r" (tmp),
 		  [my_ticket] "=&r" (tmp2),
 		  [now_serving] "=&r" (tmp3)
@@ -188,7 +188,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
 		"	li	%[ticket], 0			\n"
 		"	.previous				\n"
 		"	.set	pop				\n"
-		: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+		: [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
 		  [ticket] "=&r" (tmp),
 		  [my_ticket] "=&r" (tmp2),
 		  [now_serving] "=&r" (tmp3)
@@ -235,8 +235,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
 		"	beqzl	%1, 1b				\n"
 		"	nop					\n"
 		"	.set	reorder				\n"
-		: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-		: GCC_OFF12_ASM() (rw->lock)
+		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+		: GCC_OFF_SMALL_ASM() (rw->lock)
 		: "memory");
 	} else {
 		do {
@@ -245,8 +245,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
 			"	bltz	%1, 1b			\n"
 			"	addu	%1, 1			\n"
 			"2:	sc	%1, %0			\n"
-			: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-			: GCC_OFF12_ASM() (rw->lock)
+			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+			: GCC_OFF_SMALL_ASM() (rw->lock)
 			: "memory");
 		} while (unlikely(!tmp));
 	}
@@ -254,9 +254,6 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
 	smp_llsc_mb();
 }
 
-/* Note the use of sub, not subu which will make the kernel die with an
-   overflow exception if we ever try to unlock an rwlock that is already
-   unlocked or is being held by a writer. */
 static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
 	unsigned int tmp;
@@ -266,20 +263,20 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
 		"1:	ll	%1, %2	# arch_read_unlock	\n"
269 | " sub %1, 1 \n" | 266 | " addiu %1, 1 \n" |
 		"	sc	%1, %0				\n"
 		"	beqzl	%1, 1b				\n"
-		: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-		: GCC_OFF12_ASM() (rw->lock)
+		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+		: GCC_OFF_SMALL_ASM() (rw->lock)
 		: "memory");
 	} else {
 		do {
 			__asm__ __volatile__(
 			"1:	ll	%1, %2	# arch_read_unlock	\n"
-			"	sub	%1, 1			\n"
+			"	addiu	%1, -1			\n"
 			"	sc	%1, %0			\n"
-			: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-			: GCC_OFF12_ASM() (rw->lock)
+			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+			: GCC_OFF_SMALL_ASM() (rw->lock)
 			: "memory");
 		} while (unlikely(!tmp));
 	}
@@ -299,8 +296,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
 		"	beqzl	%1, 1b				\n"
 		"	nop					\n"
 		"	.set	reorder				\n"
-		: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-		: GCC_OFF12_ASM() (rw->lock)
+		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+		: GCC_OFF_SMALL_ASM() (rw->lock)
 		: "memory");
 	} else {
 		do {
@@ -309,8 +306,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
 			"	bnez	%1, 1b			\n"
 			"	lui	%1, 0x8000		\n"
 			"2:	sc	%1, %0			\n"
-			: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-			: GCC_OFF12_ASM() (rw->lock)
+			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+			: GCC_OFF_SMALL_ASM() (rw->lock)
 			: "memory");
 		} while (unlikely(!tmp));
 	}
@@ -349,8 +346,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
 		__WEAK_LLSC_MB
 		"	li	%2, 1				\n"
 		"2:						\n"
-		: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-		: GCC_OFF12_ASM() (rw->lock)
+		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+		: GCC_OFF_SMALL_ASM() (rw->lock)
 		: "memory");
 	} else {
 		__asm__ __volatile__(
@@ -366,8 +363,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
 		__WEAK_LLSC_MB
 		"	li	%2, 1				\n"
 		"2:						\n"
-		: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-		: GCC_OFF12_ASM() (rw->lock)
+		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+		: GCC_OFF_SMALL_ASM() (rw->lock)
 		: "memory");
 	}
 
@@ -393,8 +390,8 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
 		"	li	%2, 1				\n"
 		"	.set	reorder				\n"
 		"2:						\n"
-		: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-		: GCC_OFF12_ASM() (rw->lock)
+		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+		: GCC_OFF_SMALL_ASM() (rw->lock)
 		: "memory");
 	} else {
 		do {
@@ -406,9 +403,9 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
 			"	sc	%1, %0			\n"
 			"	li	%2, 1			\n"
 			"2:					\n"
-			: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp),
+			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp),
 			  "=&r" (ret)
-			: GCC_OFF12_ASM() (rw->lock)
+			: GCC_OFF_SMALL_ASM() (rw->lock)
 			: "memory");
 		} while (unlikely(!tmp));
 
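Context for the constraint macro being renamed above: it is spliced straight into the inline-asm operand constraint string, so the compiler only forms lock-word addresses whose offsets the LL/SC instructions can actually encode. The snippet below is not part of the patch; it is a minimal sketch of the same usage pattern, with a locally defined stand-in for GCC_OFF_SMALL_ASM() (the kernel's real definition lives in arch/mips/include/asm/compiler.h and is assumed, not reproduced, here). It targets MIPS only and exists purely to show how the renamed macro and the addiu-based decrement fit together.

/*
 * Sketch only, assuming a MIPS target. GCC_OFF_SMALL_ASM() here is a
 * stand-in expanding to an offsettable-memory constraint; the kernel's
 * macro selects the constraint appropriate for the configured ISA.
 */
#ifndef GCC_OFF_SMALL_ASM
#define GCC_OFF_SMALL_ASM() "R"	/* assumption for this sketch */
#endif

static inline void read_unlock_sketch(volatile unsigned int *lock)
{
	unsigned int tmp;

	do {
		__asm__ __volatile__(
		"1:	ll	%1, %2	# read_unlock_sketch	\n"
		"	addiu	%1, -1				\n"	/* drop one reader */
		"	sc	%1, %0				\n"	/* retry if sc failed */
		: "=" GCC_OFF_SMALL_ASM() (*lock), "=&r" (tmp)
		: GCC_OFF_SMALL_ASM() (*lock)
		: "memory");
	} while (!tmp);
}

As in the patched arch_read_unlock(), the decrement is done with addiu and an immediate of -1, so no arithmetic-overflow trap is involved in the unlock path.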