author     Markos Chandras <markos.chandras@imgtec.com>  2014-11-24 09:11:39 -0500
committer  Markos Chandras <markos.chandras@imgtec.com>  2015-02-17 10:37:23 -0500
commit     5753762cbd1cb208f6e6c916169b56139373b790 (patch)
tree       2d0ffa8b1a9810cbe68734d8a0c12820787f97be /arch/mips/include
parent     1922c356ab2d0031d1acc2979043da4a1105dc4a (diff)
MIPS: asm: spinlock: Replace "sub" instruction with "addiu"

"sub $reg, imm" is not a real MIPS instruction. The assembler can replace that with "addi $reg, -imm". However, addi has been removed from R6, so we replace the "sub" instruction with the "addiu" one.

Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
Diffstat (limited to 'arch/mips/include')
-rw-r--r--  arch/mips/include/asm/spinlock.h  |  7
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index b5238404c059..b4548690ade9 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -254,9 +254,6 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
 	smp_llsc_mb();
 }
 
-/* Note the use of sub, not subu which will make the kernel die with an
-   overflow exception if we ever try to unlock an rwlock that is already
-   unlocked or is being held by a writer. */
 static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
 	unsigned int tmp;
@@ -266,7 +263,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
 		"1: ll %1, %2 # arch_read_unlock \n"
-		" sub %1, 1 \n"
+		" addiu %1, 1 \n"
 		" sc %1, %0 \n"
 		" beqzl %1, 1b \n"
 		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
@@ -276,7 +273,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 		do {
 			__asm__ __volatile__(
 			"1: ll %1, %2 # arch_read_unlock \n"
-			" sub %1, 1 \n"
+			" addiu %1, -1 \n"
 			" sc %1, %0 \n"
 			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
 			: GCC_OFF_SMALL_ASM() (rw->lock)
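
For illustration, here is a minimal standalone sketch of the LL/SC decrement pattern touched by this patch, written with the R6-safe "addiu %1, -1" instead of the "sub %1, 1" assembler macro. It is not kernel code: the helper name counter_dec, the bare unsigned int counter, and the plain "m"/"memory" constraints (standing in for the kernel's GCC_OFF_SMALL_ASM()) are illustrative assumptions, and it only builds with a MIPS toolchain.

/* Illustrative sketch only: an LL/SC atomic decrement using the two-operand
 * "addiu %1, -1" form, which also assembles on MIPS R6, where addi (and the
 * "sub reg, imm" macro that expands to it) no longer exists. Constraints are
 * simplified relative to the kernel's GCC_OFF_SMALL_ASM(). */
static inline void counter_dec(unsigned int *p)
{
	unsigned int tmp;

	__asm__ __volatile__(
	"1:	ll	%1, %2		# load-linked			\n"
	"	addiu	%1, -1		# was: sub %1, 1		\n"
	"	sc	%1, %0		# store-conditional		\n"
	"	beqz	%1, 1b		# retry if the store failed	\n"
	: "=m" (*p), "=&r" (tmp)
	: "m" (*p)
	: "memory");
}

Outside the kernel, a compiler builtin such as __atomic_fetch_sub(p, 1, __ATOMIC_RELAXED) would let the compiler pick an appropriate sequence; the explicit asm above merely mirrors the pattern this patch modifies.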