author		Boqun Feng <boqun.feng@gmail.com>	2015-11-01 20:30:32 -0500
committer	Michael Ellerman <mpe@ellerman.id.au>	2015-12-14 04:39:01 -0500
commit		81d7a3294de7e9828310bbf986a67246b13fa01e
tree		5bf300937eb52355f7719898e5ab00e2002a6525
parent		49e9cf3f0c04bf76ffa59242254110309554861d
powerpc: Make {cmp}xchg* and their atomic_ versions fully ordered
According to memory-barriers.txt, xchg*, cmpxchg* and their atomic_
versions all need to be fully ordered; however, they are currently only
RELEASE+ACQUIRE, which is not fully ordered.
So replace PPC_RELEASE_BARRIER and PPC_ACQUIRE_BARRIER with
PPC_ATOMIC_ENTRY_BARRIER and PPC_ATOMIC_EXIT_BARRIER in
__{cmp,}xchg_{u32,u64}, respectively, to guarantee fully ordered semantics
of atomic{,64}_{cmp,}xchg() and {cmp,}xchg(), as a complement of commit
b97021f85517 ("powerpc: Fix atomic_xxx_return barrier semantics").
This patch depends on the patch "powerpc: Make value-returning atomics
fully ordered" for the PPC_ATOMIC_ENTRY_BARRIER definition.
Cc: stable@vger.kernel.org # 3.2+
Signed-off-by: Boqun Feng <boqun.feng@gmail.com>
Reviewed-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
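
The requirement is easiest to see as a store-buffering litmus test. The
following sketch is illustrative only: x, y, flag, r1 and r2 are
hypothetical variables, and WRITE_ONCE(), READ_ONCE() and smp_mb() are
the usual kernel primitives.

int x, y, flag;
int r1, r2;

/* CPU 0 */
void cpu0(void)
{
	WRITE_ONCE(x, 1);
	xchg(&flag, 1);		/* must behave as if smp_mb() sat on both sides */
	r1 = READ_ONCE(y);
}

/* CPU 1 */
void cpu1(void)
{
	WRITE_ONCE(y, 1);
	smp_mb();
	r2 = READ_ONCE(x);
}

With a fully ordered xchg(), the outcome (r1 == 0 && r2 == 0) is
forbidden. With only PPC_RELEASE_BARRIER (lwsync) before the ll/sc
sequence, CPU 0's store to x may be reordered with its later load of y,
because lwsync does not order prior stores against later loads, so the
forbidden outcome becomes observable.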
arch/powerpc/include/asm/cmpxchg.h | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
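
For context, a simplified view of what these barriers expand to on SMP,
per arch/powerpc/include/asm/synch.h once the prerequisite patch above
is applied (the acquire barrier's lwsync feature-fixup machinery is
omitted here):

/* Simplified SMP expansions from arch/powerpc/include/asm/synch.h. */
#define PPC_RELEASE_BARRIER      "lwsync\n"  /* all orderings except store->load */
#define PPC_ACQUIRE_BARRIER      "isync\n"   /* with the preceding bne-, a ctrl+isync acquire */
#define PPC_ATOMIC_ENTRY_BARRIER "sync\n"    /* full barrier before the ll/sc loop */
#define PPC_ATOMIC_EXIT_BARRIER  "sync\n"    /* full barrier after the ll/sc loop */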
diff --git a/arch/powerpc/include/asm/cmpxchg.h b/arch/powerpc/include/asm/cmpxchg.h
index ad6263cffb0f..d1a8d93cccfd 100644
--- a/arch/powerpc/include/asm/cmpxchg.h
+++ b/arch/powerpc/include/asm/cmpxchg.h
@@ -18,12 +18,12 @@ __xchg_u32(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	lwarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
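
Assembled for readability, __xchg_u32() as it reads with this hunk
applied; the signature and the return statement are reconstructed from
the rest of the file and are not part of the hunk:

static __always_inline unsigned long
__xchg_u32(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER		/* sync: fully orders everything before */
"1:	lwarx	%0,0,%2 \n"			/* load-reserve the old value */
	PPC405_ERR77(0,%2)			/* workaround for PPC405 erratum 77 */
"	stwcx.	%3,0,%2 \n\
	bne-	1b"				/* retry if the reservation was lost */
	PPC_ATOMIC_EXIT_BARRIER			/* sync: fully orders everything after */
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

__xchg_u64() and the two __cmpxchg_ helpers below follow the same pattern.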
@@ -61,12 +61,12 @@ __xchg_u64(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	ldarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stdcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -151,14 +151,14 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
 	unsigned int prev;
 
 	__asm__ __volatile__ (
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
 	cmpw	0,%0,%3\n\
 	bne-	2f\n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%4,0,%2\n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	"\n\
 2:"
 	: "=&r" (prev), "+m" (*p)
@@ -197,13 +197,13 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
 	unsigned long prev;
 
 	__asm__ __volatile__ (
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
 	cmpd	0,%0,%3\n\
 	bne-	2f\n\
 	stdcx.	%4,0,%2\n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	"\n\
 2:"
 	: "=&r" (prev), "+m" (*p)
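
The atomic_ variants named in the subject reach these helpers through
thin wrappers, which is why fixing __{cmp,}xchg_u{32,64} covers them as
well; simplified from arch/powerpc/include/asm/atomic.h of the same era:

#define atomic_xchg(v, new)       (xchg(&((v)->counter), new))
#define atomic_cmpxchg(v, o, n)   (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new)     (xchg(&((v)->counter), new))
#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))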