author     Michael S. Tsirkin <mst@redhat.com>    2015-12-21 02:22:18 -0500
committer  Michael S. Tsirkin <mst@redhat.com>    2016-01-12 13:46:51 -0500
commit     fa083e28f89a78b95ba8b7da86db40c13c60e95d (patch)
tree       50609435543980f6e9754b72599ae8e9c1d7e089 /arch
parent     abe114d9f0a80f27bc5040cd2287dca80423d13e (diff)
mips: reuse asm-generic/barrier.h
On mips dma_rmb, dma_wmb, smp_store_mb, read_barrier_depends,
smp_read_barrier_depends, smp_store_release and smp_load_acquire match the
asm-generic variants exactly. Drop the local definitions and pull in
asm-generic/barrier.h instead.

This is in preparation to refactoring this code area.

Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
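For reference, the definitions MIPS now picks up from asm-generic/barrier.h are
the same ones removed from the MIPS header below; the commit message states they
match exactly. A rough sketch of the generic fallbacks is shown here (illustrative
only; the exact text and #ifndef guards in include/asm-generic/barrier.h may
differ in detail):

/*
 * Sketch of the generic fallbacks the MIPS header now relies on.  The
 * bodies mirror the definitions this patch deletes from
 * arch/mips/include/asm/barrier.h; guards and whitespace in the real
 * asm-generic/barrier.h may differ.
 */
#ifndef read_barrier_depends
#define read_barrier_depends()		do { } while (0)
#endif

#ifndef dma_rmb
#define dma_rmb()	rmb()		/* rmb() expands to fast_rmb() on MIPS */
#endif

#ifndef dma_wmb
#define dma_wmb()	wmb()		/* wmb() expands to fast_wmb() on MIPS */
#endif

#ifndef smp_store_mb
#define smp_store_mb(var, value) \
	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
#endif

/* Release store: full barrier, then the store. */
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

/* Acquire load: the load, then a full barrier. */
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})

Because these are plain macro fallbacks, the #include of asm-generic/barrier.h
has to come after the MIPS-specific definitions (smp_mb() and friends), which is
why the patch adds it at the end of the header.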
Diffstat (limited to 'arch')
-rw-r--r--  arch/mips/include/asm/barrier.h  |  25
1 file changed, 2 insertions(+), 23 deletions(-)
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 752e0b86c171..3eac4b909355 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -10,9 +10,6 @@
 
 #include <asm/addrspace.h>
 
-#define read_barrier_depends()		do { } while(0)
-#define smp_read_barrier_depends()	do { } while(0)
-
 #ifdef CONFIG_CPU_HAS_SYNC
 #define __sync()				\
 	__asm__ __volatile__(			\
@@ -87,8 +84,6 @@
 
 #define wmb()		fast_wmb()
 #define rmb()		fast_rmb()
-#define dma_wmb()	fast_wmb()
-#define dma_rmb()	fast_rmb()
 
 #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
 # ifdef CONFIG_CPU_CAVIUM_OCTEON
@@ -112,9 +107,6 @@
 #define __WEAK_LLSC_MB		"		\n"
 #endif
 
-#define smp_store_mb(var, value) \
-	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
-
 #define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
 
 #ifdef CONFIG_CPU_CAVIUM_OCTEON
@@ -129,22 +121,9 @@
 #define nudge_writes() mb()
 #endif
 
-#define smp_store_release(p, v)						\
-do {									\
-	compiletime_assert_atomic_type(*p);				\
-	smp_mb();							\
-	WRITE_ONCE(*p, v);						\
-} while (0)
-
-#define smp_load_acquire(p)						\
-({									\
-	typeof(*p) ___p1 = READ_ONCE(*p);				\
-	compiletime_assert_atomic_type(*p);				\
-	smp_mb();							\
-	___p1;								\
-})
-
 #define smp_mb__before_atomic()	smp_mb__before_llsc()
 #define smp_mb__after_atomic()	smp_llsc_mb()
 
+#include <asm-generic/barrier.h>
+
 #endif /* __ASM_BARRIER_H */