aboutsummaryrefslogtreecommitdiffstats
path: root/arch/mips/include/asm
diff options
context:
space:
mode:
authorMichael S. Tsirkin <mst@redhat.com>2015-12-27 08:04:42 -0500
committerMichael S. Tsirkin <mst@redhat.com>2016-01-12 13:46:56 -0500
commita60514bae72ee41b506b8702dfdd6eeeffe58556 (patch)
tree940c1afe50e15729a6919dc617088ba340acb66e /arch/mips/include/asm
parentafc22de0c0ca8b4697a8aec2bbb35d4cc385e7e0 (diff)
mips: define __smp_xxx
This defines __smp_xxx barriers for mips, for use by virtualization.

smp_xxx barriers are removed as they are defined correctly by asm-generic/barriers.h.

Note: the only exception is smp_mb__before_llsc which is mips-specific. We define both the __smp_mb__before_llsc variant (for use in asm/barriers.h) and smp_mb__before_llsc (for use elsewhere on this architecture).

Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Diffstat (limited to 'arch/mips/include/asm')
-rw-r--r--arch/mips/include/asm/barrier.h26
1 file changed, 14 insertions, 12 deletions
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 3eac4b909355..d296633d890e 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -85,20 +85,20 @@
 #define wmb()		fast_wmb()
 #define rmb()		fast_rmb()
 
-#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
+#if defined(CONFIG_WEAK_ORDERING)
 # ifdef CONFIG_CPU_CAVIUM_OCTEON
-# define smp_mb()	__sync()
-# define smp_rmb()	barrier()
-# define smp_wmb()	__syncw()
+# define __smp_mb()	__sync()
+# define __smp_rmb()	barrier()
+# define __smp_wmb()	__syncw()
 # else
-# define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
-# define smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
-# define smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
+# define __smp_mb()	__asm__ __volatile__("sync" : : :"memory")
+# define __smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
+# define __smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
 # endif
 #else
-#define smp_mb()	barrier()
-#define smp_rmb()	barrier()
-#define smp_wmb()	barrier()
+#define __smp_mb()	barrier()
+#define __smp_rmb()	barrier()
+#define __smp_wmb()	barrier()
 #endif
 
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
@@ -111,6 +111,7 @@
 
 #ifdef CONFIG_CPU_CAVIUM_OCTEON
 #define smp_mb__before_llsc() smp_wmb()
+#define __smp_mb__before_llsc() __smp_wmb()
 /* Cause previous writes to become visible on all CPUs as soon as possible */
 #define nudge_writes() __asm__ __volatile__(".set push\n\t"	\
 					    ".set arch=octeon\n\t"	\
@@ -118,11 +119,12 @@
 					    ".set pop" : : : "memory")
 #else
 #define smp_mb__before_llsc() smp_llsc_mb()
+#define __smp_mb__before_llsc() smp_llsc_mb()
 #define nudge_writes() mb()
 #endif
 
-#define smp_mb__before_atomic()	smp_mb__before_llsc()
-#define smp_mb__after_atomic()	smp_llsc_mb()
+#define __smp_mb__before_atomic()	__smp_mb__before_llsc()
+#define __smp_mb__after_atomic()	smp_llsc_mb()
 
 #include <asm-generic/barrier.h>
 