path: root/arch/mips/include/asm/bitops.h
author    David Daney <ddaney@caviumnetworks.com>	2010-01-08 20:17:43 -0500
committer Ralf Baechle <ralf@linux-mips.org>	2010-02-27 06:53:06 -0500
commit    f252ffd50c97dae87b45f1dbad24f71358ccfbd6 (patch)
tree      c057fc7c3a819152603b286f935fb367fc48ae73 /arch/mips/include/asm/bitops.h
parent    ec5380c768864c7afd92aa886dd4bb6d38497a01 (diff)
MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc(). It is used before ll/sc sequences that are
documented as needing write barrier semantics.

The default implementation of smp_mb__before_llsc() is just
smp_llsc_mb(), so there are no changes in semantics.

Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb()
to be just barrier() in the non-SMP case.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
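For reference, a minimal sketch of the macro arrangement the message
describes, assuming the conventions of arch/mips/include/asm/barrier.h
at the time. That file is not part of this diff, so the exact
definitions below are illustrative, not the patch's text:

/*
 * Illustrative only -- the real definitions live in
 * arch/mips/include/asm/barrier.h, not in this patch.
 */

/* Default: identical to the old barrier, so semantics are unchanged. */
#ifndef smp_mb__before_llsc
#define smp_mb__before_llsc()	smp_llsc_mb()
#endif

/* Non-SMP case: the SMP barriers collapse to a compiler barrier. */
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif

An architecture variant whose ll/sc implementation already orders prior
writes can then override smp_mb__before_llsc() with something cheaper,
while every existing caller keeps its current behavior.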
Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r--	arch/mips/include/asm/bitops.h	8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 84a383806b2c..9255cfbee459 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -42,7 +42,7 @@
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
-#define smp_mb__before_clear_bit()	smp_llsc_mb()
+#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
 #define smp_mb__after_clear_bit()	smp_llsc_mb()

 /*
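As the comment in this hunk notes, clear_bit() itself provides no
barrier; callers that need ordering bracket it with these macros. A
hypothetical caller (flag_word, release_flag, and the bit number are
made-up names for illustration, not from this patch) would look like:

/* Hypothetical caller -- illustrative names only. */
static unsigned long flag_word;

static void release_flag(void)
{
	/* Order all prior stores before the ll/sc sequence in clear_bit(). */
	smp_mb__before_clear_bit();	/* now expands to smp_mb__before_llsc() */
	clear_bit(0, &flag_word);
	smp_mb__after_clear_bit();	/* unchanged: still smp_llsc_mb() */
}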
@@ -258,7 +258,7 @@ static inline int test_and_set_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;

-	smp_llsc_mb();
+	smp_mb__before_llsc();

 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -395,7 +395,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;

-	smp_llsc_mb();
+	smp_mb__before_llsc();

 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -485,7 +485,7 @@ static inline int test_and_change_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;

-	smp_llsc_mb();
+	smp_mb__before_llsc();

 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
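The three test_and_*_bit() hunks make the same one-line substitution at
the top of each function. A sketch of the resulting shape for
test_and_set_bit(), assuming the usual structure of these routines in
this era of the tree (the ll/sc loops and non-llsc fallback paths are
elided):

static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();	/* was smp_llsc_mb(); write-barrier semantics suffice here */

	/* ... R10000_LLSC_WAR / plain ll-sc / fallback paths elided ... */

	smp_llsc_mb();		/* trailing barrier is not touched by this patch */

	return res != 0;
}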