author	David Daney <ddaney@caviumnetworks.com>	2010-01-08 20:17:43 -0500
committer	Ralf Baechle <ralf@linux-mips.org>	2010-02-27 06:53:06 -0500
commit	f252ffd50c97dae87b45f1dbad24f71358ccfbd6
tree	c057fc7c3a819152603b286f935fb367fc48ae73 /arch/mips/include/asm/atomic.h
parent	ec5380c768864c7afd92aa886dd4bb6d38497a01
MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc().  It is used before ll/sc sequences that are
documented as needing write barrier semantics.

The default implementation of smp_mb__before_llsc() is just
smp_llsc_mb(), so there are no changes in semantics.

Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb()
to be just barrier() in the non-SMP case.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
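For orientation, here is a minimal sketch of the definitions the message
describes.  They land outside this file (presumably in
arch/mips/include/asm/barrier.h); the exact preprocessor guards below are
assumptions for illustration:

/* Default case per the commit message: smp_mb__before_llsc() is just
 * smp_llsc_mb(), so there is no semantic change until a platform
 * overrides it with something stronger or weaker. */
#ifndef smp_mb__before_llsc
#define smp_mb__before_llsc()	smp_llsc_mb()
#endif

/* Non-SMP simplification also described in the message: the SMP
 * barrier macros reduce to a plain compiler barrier. */
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif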
Diffstat (limited to 'arch/mips/include/asm/atomic.h')
-rw-r--r--	arch/mips/include/asm/atomic.h	16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index dd75d673447e..519197ede089 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -137,7 +137,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -189,7 +189,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -249,7 +249,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -516,7 +516,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -568,7 +568,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -628,7 +628,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -788,9 +788,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic-long.h>
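Taken together, the hunks above give every atomic*_return routine the
barrier placement sketched below.  This is an illustrative stand-in, not
the kernel's code: the real routines use inline-asm ll/sc retry loops,
for which the GCC builtin __sync_add_and_fetch() substitutes here so the
sketch stays self-contained.

/* Sketch of the pattern after this patch: a write-barrier-capable
 * macro before the ll/sc sequence, the old full barrier after it. */
static __inline__ int atomic_add_return_sketch(int i, atomic_t *v)
{
	int result;

	smp_mb__before_llsc();		/* was smp_llsc_mb() */

	/* stand-in for the ll/sc retry loop on v->counter */
	result = __sync_add_and_fetch(&v->counter, i);

	smp_llsc_mb();			/* trailing barrier unchanged */

	return result;
}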