author		David Daney <ddaney@caviumnetworks.com>	2010-01-08 20:17:43 -0500
committer	Ralf Baechle <ralf@linux-mips.org>	2010-02-27 06:53:06 -0500
commit		f252ffd50c97dae87b45f1dbad24f71358ccfbd6 (patch)
tree		c057fc7c3a819152603b286f935fb367fc48ae73 /arch/mips
parent		ec5380c768864c7afd92aa886dd4bb6d38497a01 (diff)
MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc().  It is used before ll/sc sequences that are
documented as needing write barrier semantics.

The default implementation of smp_mb__before_llsc() is just smp_llsc_mb(),
so there are no changes in semantics.

Also simplify definition of smp_mb(), smp_rmb(), and smp_wmb() to be just
barrier() in the non-SMP case.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
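The usage pattern the new macro standardises can be sketched as below. This is an illustrative example only, not code from the patch: the function name example_atomic_add_return, its operand names, and the exact asm constraints are assumptions, and it presumes normal kernel context (atomic_t, unlikely(), and the barrier macros from <asm/barrier.h>). smp_mb__before_llsc() orders writes issued before the ll/sc retry loop, and smp_llsc_mb() afterwards completes the full-barrier behaviour documented for the *_return operations.

/* Illustrative sketch only -- assumed names, kernel context presumed. */
static inline int example_atomic_add_return(int i, atomic_t *v)
{
	int old, tmp;

	smp_mb__before_llsc();	/* order earlier writes before the ll/sc sequence */

	do {
		__asm__ __volatile__(
		"	ll	%0, %2		# load linked: fetch old value	\n"
		"	addu	%1, %0, %3	# compute the new value		\n"
		"	sc	%1, %2		# store conditional		\n"
		: "=&r" (old), "=&r" (tmp), "+m" (v->counter)
		: "Ir" (i)
		: "memory");
	} while (unlikely(!tmp));	/* sc leaves 0 in tmp on failure: retry */

	smp_llsc_mb();		/* complete the barrier once the store has succeeded */

	return old + i;
}

The non-returning and _local variants need no such ordering, which is why cmpxchg() in the diff below gains separate pre_barrier/post_barrier arguments while cmpxchg_local() passes both as empty.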
Diffstat (limited to 'arch/mips')
-rw-r--r--	arch/mips/include/asm/atomic.h		16
-rw-r--r--	arch/mips/include/asm/barrier.h		15
-rw-r--r--	arch/mips/include/asm/bitops.h		8
-rw-r--r--	arch/mips/include/asm/cmpxchg.h		10
-rw-r--r--	arch/mips/include/asm/spinlock.h	4
-rw-r--r--	arch/mips/include/asm/system.h		4
6 files changed, 32 insertions(+), 25 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index dd75d673447e..519197ede089 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -137,7 +137,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -189,7 +189,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -249,7 +249,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -516,7 +516,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -568,7 +568,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -628,7 +628,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -788,9 +788,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic-long.h>
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 91785dc8e94e..1a5a51c3e96f 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -131,23 +131,26 @@
 #endif /* !CONFIG_CPU_HAS_WB */
 
 #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
-#define __WEAK_ORDERING_MB	"	sync	\n"
+#define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
 #else
-#define __WEAK_ORDERING_MB	"		\n"
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
 #endif
+
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
 #define __WEAK_LLSC_MB		"	sync	\n"
 #else
 #define __WEAK_LLSC_MB		"		\n"
 #endif
 
-#define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_rmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_wmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-
 #define set_mb(var, value) \
 	do { var = value; smp_mb(); } while (0)
 
 #define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
 
+#define smp_mb__before_llsc() smp_llsc_mb()
+
 #endif /* __ASM_BARRIER_H */
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 84a383806b2c..9255cfbee459 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -42,7 +42,7 @@
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
-#define smp_mb__before_clear_bit()	smp_llsc_mb()
+#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
 #define smp_mb__after_clear_bit()	smp_llsc_mb()
 
 /*
@@ -258,7 +258,7 @@ static inline int test_and_set_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -395,7 +395,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -485,7 +485,7 @@ static inline int test_and_change_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 815a438a268d..ed9aaaaf0749 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -72,14 +72,14 @@
  */
 extern void __cmpxchg_called_with_bad_pointer(void);
 
-#define __cmpxchg(ptr, old, new, barrier)				\
+#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
 ({									\
 	__typeof__(ptr) __ptr = (ptr);					\
 	__typeof__(*(ptr)) __old = (old);				\
 	__typeof__(*(ptr)) __new = (new);				\
 	__typeof__(*(ptr)) __res = 0;					\
 									\
-	barrier;							\
+	pre_barrier;							\
 									\
 	switch (sizeof(*(__ptr))) {					\
 	case 4:								\
@@ -96,13 +96,13 @@ extern void __cmpxchg_called_with_bad_pointer(void);
 		break;							\
 	}								\
 									\
-	barrier;							\
+	post_barrier;							\
 									\
 	__res;								\
 })
 
-#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_llsc_mb())
-#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, )
+#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
+#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, , )
 
 #define cmpxchg64(ptr, o, n)						\
   ({									\
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 21ef9efbde43..5f16696eaa00 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -138,7 +138,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
 	int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__ (
@@ -305,7 +305,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
 	unsigned int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
diff --git a/arch/mips/include/asm/system.h b/arch/mips/include/asm/system.h
index 83b5509e09e8..bb937ccfba1e 100644
--- a/arch/mips/include/asm/system.h
+++ b/arch/mips/include/asm/system.h
@@ -95,6 +95,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 {
 	__u32 retval;
 
+	smp_mb__before_llsc();
+
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;
 
@@ -147,6 +149,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 {
 	__u64 retval;
 
+	smp_mb__before_llsc();
+
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;
 