author    Ralf Baechle <ralf@linux-mips.org>  2007-07-14 08:24:05 -0400
committer Ralf Baechle <ralf@linux-mips.org>  2007-07-20 13:57:39 -0400
commit    17099b1142f6c0359fca60a3464dea8fb30badea (patch)
tree      26b9f3955dca84ccab594a76680c2a71e166768a /include/asm-mips/spinlock.h
parent    ed203dadcd1373e80e95b04075e1eefc554a914b (diff)
[MIPS] Make support for weakly ordered LL/SC a config option.
None of the weakly ordered processors supported in-tree need this, but it seems like this could change ...

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
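For context, the smp_llsc_mb() and __WEAK_LLSC_MB helpers this diff switches to come from a companion change to include/asm-mips/barrier.h that is not shown here. The sketch below shows how they are plausibly defined, keyed off the new config option the commit title refers to; since only spinlock.h appears in this diff, treat the exact definitions and the CONFIG_WEAK_REORDERING_BEYOND_LLSC spelling as assumptions:

/*
 * Assumed companion definitions (not part of this diff): emit a real
 * sync barrier after an LL/SC sequence only when the platform declares
 * that LL/SC can be reordered against other memory accesses; otherwise
 * compile to nothing so strongly ordered cores pay no cost.
 */
#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
#define __WEAK_LLSC_MB		"	sync	\n"
#else
#define __WEAK_LLSC_MB		"		\n"
#endif

/* C-level wrapper used in place of the unconditional smp_mb(). */
#define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : : "memory")

With this in place, smp_llsc_mb() degrades to a pure compiler barrier on processors whose LL/SC sequences are already strongly ordered, which is why every smp_mb() adjacent to an LL/SC loop in this file can be relaxed.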
Diffstat (limited to 'include/asm-mips/spinlock.h')
-rw-r--r--	include/asm-mips/spinlock.h	18
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/include/asm-mips/spinlock.h b/include/asm-mips/spinlock.h
index 35e431cd796b..bb897016c491 100644
--- a/include/asm-mips/spinlock.h
+++ b/include/asm-mips/spinlock.h
@@ -67,7 +67,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 }
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
@@ -118,7 +118,7 @@ static inline unsigned int __raw_spin_trylock(raw_spinlock_t *lock)
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res == 0;
 }
@@ -183,7 +183,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 }
 
 /* Note the use of sub, not subu which will make the kernel die with an
@@ -193,7 +193,7 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
 	unsigned int tmp;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
@@ -262,7 +262,7 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
 		: "memory");
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
@@ -293,7 +293,7 @@ static inline int __raw_read_trylock(raw_rwlock_t *rw)
293 " .set reorder \n" 293 " .set reorder \n"
294 " beqzl %1, 1b \n" 294 " beqzl %1, 1b \n"
295 " nop \n" 295 " nop \n"
296 __WEAK_ORDERING_MB 296 __WEAK_LLSC_MB
297 " li %2, 1 \n" 297 " li %2, 1 \n"
298 "2: \n" 298 "2: \n"
299 : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret) 299 : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
@@ -310,7 +310,7 @@ static inline int __raw_read_trylock(raw_rwlock_t *rw)
310 " beqz %1, 1b \n" 310 " beqz %1, 1b \n"
311 " nop \n" 311 " nop \n"
312 " .set reorder \n" 312 " .set reorder \n"
313 __WEAK_ORDERING_MB 313 __WEAK_LLSC_MB
314 " li %2, 1 \n" 314 " li %2, 1 \n"
315 "2: \n" 315 "2: \n"
316 : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret) 316 : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
@@ -336,7 +336,7 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
336 " sc %1, %0 \n" 336 " sc %1, %0 \n"
337 " beqzl %1, 1b \n" 337 " beqzl %1, 1b \n"
338 " nop \n" 338 " nop \n"
339 __WEAK_ORDERING_MB 339 __WEAK_LLSC_MB
340 " li %2, 1 \n" 340 " li %2, 1 \n"
341 " .set reorder \n" 341 " .set reorder \n"
342 "2: \n" 342 "2: \n"
@@ -354,7 +354,7 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
354 " beqz %1, 3f \n" 354 " beqz %1, 3f \n"
355 " li %2, 1 \n" 355 " li %2, 1 \n"
356 "2: \n" 356 "2: \n"
357 __WEAK_ORDERING_MB 357 __WEAK_LLSC_MB
358 " .subsection 2 \n" 358 " .subsection 2 \n"
359 "3: b 1b \n" 359 "3: b 1b \n"
360 " li %2, 0 \n" 360 " li %2, 0 \n"