 arch/mips/include/asm/spinlock.h | 116 ++++++++++++++------------------------
 1 file changed, 44 insertions(+), 72 deletions(-)
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 5130c88d6420..0b1dbd22e6f8 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -242,25 +242,16 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
 		: "m" (rw->lock)
 		: "memory");
 	} else {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_read_lock	\n"
-		"1:	ll	%1, %2					\n"
-		"	bltz	%1, 3f					\n"
-		"	 addu	%1, 1					\n"
-		"2:	sc	%1, %0					\n"
-		"	beqz	%1, 1b					\n"
-		"	 nop						\n"
-		"	.subsection 2					\n"
-		"3:	ll	%1, %2					\n"
-		"	bltz	%1, 3b					\n"
-		"	 addu	%1, 1					\n"
-		"	b	2b					\n"
-		"	 nop						\n"
-		"	.previous					\n"
-		"	.set	reorder					\n"
-		: "=m" (rw->lock), "=&r" (tmp)
-		: "m" (rw->lock)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"1:	ll	%1, %2	# arch_read_lock	\n"
+			"	bltz	%1, 1b				\n"
+			"	 addu	%1, 1				\n"
+			"2:	sc	%1, %0				\n"
+			: "=m" (rw->lock), "=&r" (tmp)
+			: "m" (rw->lock)
+			: "memory");
+		} while (unlikely(!tmp));
 	}
 
 	smp_llsc_mb();
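The change above keeps only the bare LL/SC sequence inside the asm and lets the compiler build the retry loop, keyed on the value sc leaves in %1 (tmp): 0 means the reservation was lost, so the do/while goes around again. For readers less used to MIPS LL/SC, here is a rough C11-atomics model of the new read-lock loop. It is a sketch of ours, not kernel code: it assumes the rwlock word is a 32-bit counter whose sign bit marks a writer (the 0x80000000 that arch_write_lock builds with lui), a weak compare-and-swap stands in for the ll/sc pair, and acquire ordering only loosely plays the role of smp_llsc_mb().

#include <stdatomic.h>
#include <stdint.h>

/* Hypothetical model of the read-lock fast path: spin while a writer
 * holds the word (value negative), otherwise try to add one reader.
 * A failed CAS plays the role of a failed sc, so we go around again. */
static inline void model_read_lock(_Atomic int32_t *lock)
{
	for (;;) {
		int32_t old = atomic_load_explicit(lock, memory_order_relaxed);

		if (old < 0)			/* writer active: keep spinning */
			continue;
		if (atomic_compare_exchange_weak_explicit(lock, &old, old + 1,
							  memory_order_acquire,
							  memory_order_relaxed))
			break;			/* the "sc" succeeded */
	}
}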
@@ -285,21 +276,15 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 		: "m" (rw->lock)
 		: "memory");
 	} else {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_read_unlock	\n"
-		"1:	ll	%1, %2					\n"
-		"	sub	%1, 1					\n"
-		"	sc	%1, %0					\n"
-		"	beqz	%1, 2f					\n"
-		"	 nop						\n"
-		"	.subsection 2					\n"
-		"2:	b	1b					\n"
-		"	 nop						\n"
-		"	.previous					\n"
-		"	.set	reorder					\n"
-		: "=m" (rw->lock), "=&r" (tmp)
-		: "m" (rw->lock)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"1:	ll	%1, %2	# arch_read_unlock	\n"
+			"	sub	%1, 1				\n"
+			"	sc	%1, %0				\n"
+			: "=m" (rw->lock), "=&r" (tmp)
+			: "m" (rw->lock)
+			: "memory");
+		} while (unlikely(!tmp));
 	}
 }
 
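Same transformation for the unlock path. In the same hypothetical C11 terms (again ours, not the patch's), the whole LL/SC loop reduces to a fetch-and-subtract; relaxed ordering is used here only because the kernel's barriers live outside this hunk.

#include <stdatomic.h>
#include <stdint.h>

/* Hypothetical model of the unlock path: drop the reader count by one,
 * retrying only when the store-conditional (the CAS inside fetch_sub on
 * LL/SC machines) fails. */
static inline void model_read_unlock(_Atomic int32_t *lock)
{
	atomic_fetch_sub_explicit(lock, 1, memory_order_relaxed);
}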
@@ -321,25 +306,16 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
 		: "m" (rw->lock)
 		: "memory");
 	} else {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_write_lock	\n"
-		"1:	ll	%1, %2					\n"
-		"	bnez	%1, 3f					\n"
-		"	 lui	%1, 0x8000				\n"
-		"2:	sc	%1, %0					\n"
-		"	beqz	%1, 3f					\n"
-		"	 nop						\n"
-		"	.subsection 2					\n"
-		"3:	ll	%1, %2					\n"
-		"	bnez	%1, 3b					\n"
-		"	 lui	%1, 0x8000				\n"
-		"	b	2b					\n"
-		"	 nop						\n"
-		"	.previous					\n"
-		"	.set	reorder					\n"
-		: "=m" (rw->lock), "=&r" (tmp)
-		: "m" (rw->lock)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"1:	ll	%1, %2	# arch_write_lock	\n"
+			"	bnez	%1, 1b				\n"
+			"	 lui	%1, 0x8000			\n"
+			"2:	sc	%1, %0				\n"
+			: "=m" (rw->lock), "=&r" (tmp)
+			: "m" (rw->lock)
+			: "memory");
+		} while (unlikely(!tmp));
 	}
 
 	smp_llsc_mb();
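And the write-lock counterpart in the same hypothetical model (ours, not the patch's): wait until the word is zero, meaning no readers and no writer, then claim it by storing the writer bit that "lui %1, 0x8000" builds, i.e. 0x80000000.

#include <stdatomic.h>
#include <stdint.h>

/* Hypothetical model of the write-lock loop: spin until the word is 0,
 * then store the writer bit.  INT32_MIN has the 0x80000000 bit pattern
 * that "lui %1, 0x8000" constructs in the asm. */
static inline void model_write_lock(_Atomic int32_t *lock)
{
	for (;;) {
		int32_t old = atomic_load_explicit(lock, memory_order_relaxed);

		if (old != 0)			/* readers or a writer present */
			continue;
		if (atomic_compare_exchange_weak_explicit(lock, &old, INT32_MIN,
							  memory_order_acquire,
							  memory_order_relaxed))
			break;			/* lock claimed */
	}
}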
@@ -424,25 +400,21 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
 		: "m" (rw->lock)
 		: "memory");
 	} else {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_write_trylock	\n"
-		"	li	%2, 0					\n"
-		"1:	ll	%1, %3					\n"
-		"	bnez	%1, 2f					\n"
-		"	 lui	%1, 0x8000				\n"
-		"	sc	%1, %0					\n"
-		"	beqz	%1, 3f					\n"
-		"	 li	%2, 1					\n"
-		"2:							\n"
-		__WEAK_LLSC_MB
-		"	.subsection 2					\n"
-		"3:	b	1b					\n"
-		"	 li	%2, 0					\n"
-		"	.previous					\n"
-		"	.set	reorder					\n"
-		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-		: "m" (rw->lock)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"	ll	%1, %3	# arch_write_trylock	\n"
+			"	li	%2, 0				\n"
+			"	bnez	%1, 2f				\n"
+			"	lui	%1, 0x8000			\n"
+			"	sc	%1, %0				\n"
+			"	li	%2, 1				\n"
+			"2:						\n"
+			: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+			: "m" (rw->lock)
+			: "memory");
+		} while (unlikely(!tmp));
+
+		smp_llsc_mb();
 	}
 
 	return ret;
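The trylock variant makes a single attempt rather than spinning: if the word is non-zero it gives up with ret = 0, otherwise it issues the sc once and reports whether it stuck, and the new do/while only repeats on a spurious sc failure. A strong compare-and-swap captures that in the hypothetical model below (ours, not the patch's); the trailing smp_llsc_mb() is approximated by acquire ordering.

#include <stdatomic.h>
#include <stdint.h>

/* Hypothetical model of the trylock: one attempt, no spinning.  The
 * strong CAS retries internally only on spurious failure, mirroring the
 * do/while around the sc; it returns nonzero iff the writer bit
 * (0x80000000, INT32_MIN) was installed into a previously zero word. */
static inline int model_write_trylock(_Atomic int32_t *lock)
{
	int32_t expected = 0;

	return atomic_compare_exchange_strong_explicit(lock, &expected, INT32_MIN,
						       memory_order_acquire,
						       memory_order_relaxed);
}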