author     David Daney <ddaney@caviumnetworks.com>    2010-01-08 20:17:43 -0500
committer  Ralf Baechle <ralf@linux-mips.org>         2010-02-27 06:53:06 -0500
commit     f252ffd50c97dae87b45f1dbad24f71358ccfbd6
tree       c057fc7c3a819152603b286f935fb367fc48ae73  /arch/mips/include/asm/barrier.h
parent     ec5380c768864c7afd92aa886dd4bb6d38497a01
MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc(). It is used before ll/sc sequences that are
documented as needing write barrier semantics.
The default implementation of smp_mb__before_llsc() is just smp_llsc_mb(),
so there are no changes in semantics.
Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb() to be
just barrier() in the non-SMP case.
Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
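
The intended call-site pattern looks like the sketch below (illustration only,
not part of this patch): a simplified add-and-return in the style of the MIPS
ll/sc loops, with the barrier macro bodies inlined so the example stands alone.
The function name example_add_return and the plain int pointer are placeholders.

/* Illustration: where smp_mb__before_llsc() sits relative to an ll/sc
 * sequence that needs write-barrier semantics.  The macro bodies are
 * copied inline (weakly-ordered SMP case) so the sketch is self-contained;
 * in the kernel they come from <asm/barrier.h>.
 */
#define smp_llsc_mb()		__asm__ __volatile__("sync" : : : "memory")
#define smp_mb__before_llsc()	smp_llsc_mb()	/* default added by this patch */

static inline int example_add_return(int i, int *p)
{
	int old, tmp;

	smp_mb__before_llsc();	/* order earlier stores before the ll/sc */

	__asm__ __volatile__(
	"1:	ll	%0, %2		# load-linked: read old value	\n"
	"	addu	%1, %0, %3	# tmp = old + i			\n"
	"	sc	%1, %2		# store-conditional		\n"
	"	beqz	%1, 1b		# retry if the store failed	\n"
	: "=&r" (old), "=&r" (tmp), "+m" (*p)
	: "Ir" (i)
	: "memory");

	smp_llsc_mb();		/* completion barrier after the sequence */

	return old + i;
}

Giving the pre-ll/sc barrier its own name lets an architecture later define it
differently from smp_llsc_mb() without touching the call sites.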
Diffstat (limited to 'arch/mips/include/asm/barrier.h')
-rw-r--r--  arch/mips/include/asm/barrier.h | 15
1 file changed, 9 insertions(+), 6 deletions(-)
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 91785dc8e94e..1a5a51c3e96f 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -131,23 +131,26 @@
 #endif /* !CONFIG_CPU_HAS_WB */
 
 #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
-#define __WEAK_ORDERING_MB	"       sync	\n"
+#define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
 #else
-#define __WEAK_ORDERING_MB	"		\n"
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
 #endif
+
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
 #define __WEAK_LLSC_MB		"       sync	\n"
 #else
 #define __WEAK_LLSC_MB		"		\n"
 #endif
 
-#define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_rmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_wmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-
 #define set_mb(var, value) \
 	do { var = value; smp_mb(); } while (0)
 
 #define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
 
+#define smp_mb__before_llsc() smp_llsc_mb()
+
 #endif /* __ASM_BARRIER_H */
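
As context for the non-SMP simplification in this patch (illustration only):
barrier() is the kernel's compiler-only barrier, so when CONFIG_WEAK_ORDERING
and CONFIG_SMP are not both set the smp_* macros now emit no SYNC at all and
merely stop the compiler from reordering. A minimal sketch of the two cases,
using smp_wmb() in a publish pattern; publish(), data, and ready are made up
for the example.

/* Sketch: what smp_wmb() reduces to on each side of the new #if.
 * barrier() is written out here as the usual GCC compiler barrier.
 */
#define barrier()	__asm__ __volatile__("" : : : "memory")

#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
#define smp_wmb()	__asm__ __volatile__("sync" : : : "memory")	/* real SYNC */
#else
#define smp_wmb()	barrier()	/* compiler-only: no instruction emitted */
#endif

static int data;
static int ready;

static void publish(int value)
{
	data = value;
	smp_wmb();	/* make the data store visible before the flag store */
	ready = 1;
}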