author		Anton Blanchard <anton@samba.org>	2010-02-09 20:04:06 -0500
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2010-02-16 22:03:15 -0500
commit		f10e2e5b4b4c9937de596f96ffe028be3a565598 (patch)
tree		89808d05159ac5bd4aeea53b52dc0ddb3373fe65 /arch/powerpc/include/asm
parent		66d99b883419b8df6d0a24ca957da7ab4831cf6e (diff)
powerpc: Rename LWSYNC_ON_SMP to PPC_RELEASE_BARRIER, ISYNC_ON_SMP to PPC_ACQUIRE_BARRIER
For performance reasons we are about to change ISYNC_ON_SMP to sometimes be
lwsync. Now that the macro name doesn't make sense, change it and
LWSYNC_ON_SMP to better explain what the barriers are doing.

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
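For readers unfamiliar with the pattern being renamed, here is a minimal sketch, not taken from the patch itself, of how the two macros pair up in a lock-style primitive on powerpc. It mirrors the spinlock code touched below; the function names and surrounding scaffolding are illustrative only. The acquire barrier keeps later accesses from being executed before the lock is observed held, and the release barrier orders everything in the critical section before the releasing store.

/*
 * Illustrative sketch only -- not part of this patch.
 */
#include <asm/synch.h>	/* PPC_ACQUIRE_BARRIER, PPC_RELEASE_BARRIER */

static inline int example_trylock(volatile unsigned int *lock)
{
	unsigned int old;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1\n"	/* load-reserve the lock word */
"	cmpwi	0,%0,0\n"	/* non-zero means already held */
"	bne-	2f\n"
"	stwcx.	%2,0,%1\n"	/* try to claim it atomically */
"	bne-	1b\n"		/* lost the reservation: retry */
	PPC_ACQUIRE_BARRIER	/* no later access moves above the acquire */
"2:"
	: "=&r" (old)
	: "r" (lock), "r" (1)
	: "cr0", "memory");

	return old == 0;	/* zero meant the lock was free */
}

static inline void example_unlock(volatile unsigned int *lock)
{
	/* Release: all accesses in the critical section complete first. */
	__asm__ __volatile__(PPC_RELEASE_BARRIER : : : "memory");
	*lock = 0;
}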
Diffstat (limited to 'arch/powerpc/include/asm')
-rw-r--r--	arch/powerpc/include/asm/atomic.h	48
-rw-r--r--	arch/powerpc/include/asm/bitops.h	16
-rw-r--r--	arch/powerpc/include/asm/futex.h	6
-rw-r--r--	arch/powerpc/include/asm/mutex.h	6
-rw-r--r--	arch/powerpc/include/asm/spinlock.h	25
-rw-r--r--	arch/powerpc/include/asm/synch.h	8
-rw-r--r--	arch/powerpc/include/asm/system.h	16
7 files changed, 65 insertions, 60 deletions
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 4012483b1899..b8f152ece025 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -49,13 +49,13 @@ static __inline__ int atomic_add_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%2		# atomic_add_return\n\
 	add	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%0,0,%2 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (a), "r" (&v->counter)
 	: "cc", "memory");
@@ -85,13 +85,13 @@ static __inline__ int atomic_sub_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%2		# atomic_sub_return\n\
 	subf	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%0,0,%2 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (a), "r" (&v->counter)
 	: "cc", "memory");
@@ -119,13 +119,13 @@ static __inline__ int atomic_inc_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%1		# atomic_inc_return\n\
 	addic	%0,%0,1\n"
 	PPC405_ERR77(0,%1)
 "	stwcx.	%0,0,%1 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (&v->counter)
 	: "cc", "xer", "memory");
@@ -163,13 +163,13 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%1		# atomic_dec_return\n\
 	addic	%0,%0,-1\n"
 	PPC405_ERR77(0,%1)
 "	stwcx.	%0,0,%1\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (&v->counter)
 	: "cc", "xer", "memory");
@@ -194,7 +194,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 	int t;
 
 	__asm__ __volatile__ (
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%1		# atomic_add_unless\n\
 	cmpw	0,%0,%3 \n\
 	beq-	2f \n\
@@ -202,7 +202,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 	PPC405_ERR77(0,%2)
 "	stwcx.	%0,0,%1 \n\
 	bne-	1b \n"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 "	subf	%0,%2,%0 \n\
 2:"
 	: "=&r" (t)
@@ -227,7 +227,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
 	cmpwi	%0,1\n\
 	addi	%0,%0,-1\n\
@@ -235,7 +235,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 	PPC405_ERR77(0,%1)
 "	stwcx.	%0,0,%1\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	"\n\
 2:"	: "=&b" (t)
 	: "r" (&v->counter)
@@ -286,12 +286,12 @@ static __inline__ long atomic64_add_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%2		# atomic64_add_return\n\
 	add	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (a), "r" (&v->counter)
 	: "cc", "memory");
@@ -320,12 +320,12 @@ static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
 	subf	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (a), "r" (&v->counter)
 	: "cc", "memory");
@@ -352,12 +352,12 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
 	addic	%0,%0,1\n\
 	stdcx.	%0,0,%1 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (&v->counter)
 	: "cc", "xer", "memory");
@@ -394,12 +394,12 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
 	addic	%0,%0,-1\n\
 	stdcx.	%0,0,%1\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (&v->counter)
 	: "cc", "xer", "memory");
@@ -419,13 +419,13 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
 	addic.	%0,%0,-1\n\
 	blt-	2f\n\
 	stdcx.	%0,0,%1\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	"\n\
 2:"	: "=&r" (t)
 	: "r" (&v->counter)
@@ -451,14 +451,14 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	long t;
 
 	__asm__ __volatile__ (
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%1		# atomic_add_unless\n\
 	cmpd	0,%0,%3 \n\
 	beq-	2f \n\
 	add	%0,%2,%0 \n"
 "	stdcx.	%0,0,%1 \n\
 	bne-	1b \n"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 "	subf	%0,%2,%0 \n\
 2:"
 	: "=&r" (t)
diff --git a/arch/powerpc/include/asm/bitops.h b/arch/powerpc/include/asm/bitops.h
index 3c7c37bd92e3..30964ae2d096 100644
--- a/arch/powerpc/include/asm/bitops.h
+++ b/arch/powerpc/include/asm/bitops.h
@@ -78,7 +78,7 @@ static __inline__ void fn(unsigned long mask, \
 
 DEFINE_BITOP(set_bits, or, "", "")
 DEFINE_BITOP(clear_bits, andc, "", "")
-DEFINE_BITOP(clear_bits_unlock, andc, LWSYNC_ON_SMP, "")
+DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER, "")
 DEFINE_BITOP(change_bits, xor, "", "")
 
 static __inline__ void set_bit(int nr, volatile unsigned long *addr)
@@ -124,10 +124,14 @@ static __inline__ unsigned long fn( \
 	return (old & mask);	\
 }
 
-DEFINE_TESTOP(test_and_set_bits, or, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0)
-DEFINE_TESTOP(test_and_set_bits_lock, or, "", ISYNC_ON_SMP, 1)
-DEFINE_TESTOP(test_and_clear_bits, andc, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0)
-DEFINE_TESTOP(test_and_change_bits, xor, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0)
+DEFINE_TESTOP(test_and_set_bits, or, PPC_RELEASE_BARRIER,
+	      PPC_ACQUIRE_BARRIER, 0)
+DEFINE_TESTOP(test_and_set_bits_lock, or, "",
+	      PPC_ACQUIRE_BARRIER, 1)
+DEFINE_TESTOP(test_and_clear_bits, andc, PPC_RELEASE_BARRIER,
+	      PPC_ACQUIRE_BARRIER, 0)
+DEFINE_TESTOP(test_and_change_bits, xor, PPC_RELEASE_BARRIER,
+	      PPC_ACQUIRE_BARRIER, 0)
 
 static __inline__ int test_and_set_bit(unsigned long nr,
 					volatile unsigned long *addr)
@@ -158,7 +162,7 @@ static __inline__ int test_and_change_bit(unsigned long nr,
 
 static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
 {
-	__asm__ __volatile__(LWSYNC_ON_SMP "" ::: "memory");
+	__asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
 	__clear_bit(nr, addr);
 }
 
diff --git a/arch/powerpc/include/asm/futex.h b/arch/powerpc/include/asm/futex.h
index 9696cc36d2dc..7c589ef81fb0 100644
--- a/arch/powerpc/include/asm/futex.h
+++ b/arch/powerpc/include/asm/futex.h
@@ -11,7 +11,7 @@
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
   __asm__ __volatile ( \
-	LWSYNC_ON_SMP \
+	PPC_RELEASE_BARRIER \
 "1:	lwarx	%0,0,%2\n" \
 	insn \
 	PPC405_ERR77(0, %2) \
@@ -90,14 +90,14 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
 		return -EFAULT;
 
 	__asm__ __volatile__ (
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%2		# futex_atomic_cmpxchg_inatomic\n\
 	cmpw	0,%0,%3\n\
 	bne-	3f\n"
 	PPC405_ERR77(0,%2)
 "2:	stwcx.	%4,0,%2\n\
 	bne-	1b\n"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 "3:	.section .fixup,\"ax\"\n\
 4:	li	%0,%5\n\
 	b	3b\n\
diff --git a/arch/powerpc/include/asm/mutex.h b/arch/powerpc/include/asm/mutex.h
index dabc01c727b8..5399f7e18102 100644
--- a/arch/powerpc/include/asm/mutex.h
+++ b/arch/powerpc/include/asm/mutex.h
@@ -15,7 +15,7 @@ static inline int __mutex_cmpxchg_lock(atomic_t *v, int old, int new)
 	PPC405_ERR77(0,%1)
 "	stwcx.	%3,0,%1\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	"\n\
 2:"
 	: "=&r" (t)
@@ -35,7 +35,7 @@ static inline int __mutex_dec_return_lock(atomic_t *v)
 	PPC405_ERR77(0,%1)
 "	stwcx.	%0,0,%1\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (t)
 	: "r" (&v->counter)
 	: "cc", "memory");
@@ -48,7 +48,7 @@ static inline int __mutex_inc_return_unlock(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%1		# mutex unlock\n\
 	addic	%0,%0,1\n"
 	PPC405_ERR77(0,%1)
diff --git a/arch/powerpc/include/asm/spinlock.h b/arch/powerpc/include/asm/spinlock.h
index 1c35b59f6f30..f9611bd69ed2 100644
--- a/arch/powerpc/include/asm/spinlock.h
+++ b/arch/powerpc/include/asm/spinlock.h
@@ -65,9 +65,10 @@ static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock)
 	cmpwi		0,%0,0\n\
 	bne-		2f\n\
 	stwcx.		%1,0,%2\n\
-	bne-		1b\n\
-	isync\n\
-2:"	: "=&r" (tmp)
+	bne-		1b\n"
+	PPC_ACQUIRE_BARRIER
+"2:"
+	: "=&r" (tmp)
 	: "r" (token), "r" (&lock->slock)
 	: "cr0", "memory");
 
@@ -145,7 +146,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
 	SYNC_IO;
 	__asm__ __volatile__("# arch_spin_unlock\n\t"
-				LWSYNC_ON_SMP: : :"memory");
+				PPC_RELEASE_BARRIER: : :"memory");
 	lock->slock = 0;
 }
 
@@ -193,9 +194,9 @@ static inline long __arch_read_trylock(arch_rwlock_t *rw)
 		ble-		2f\n"
 	PPC405_ERR77(0,%1)
 "	stwcx.		%0,0,%1\n\
-	bne-		1b\n\
-	isync\n\
-2:"	: "=&r" (tmp)
+	bne-		1b\n"
+	PPC_ACQUIRE_BARRIER
+"2:"	: "=&r" (tmp)
 	: "r" (&rw->lock)
 	: "cr0", "xer", "memory");
 
@@ -217,9 +218,9 @@ static inline long __arch_write_trylock(arch_rwlock_t *rw)
 		bne-		2f\n"
 	PPC405_ERR77(0,%1)
 "	stwcx.		%1,0,%2\n\
-	bne-		1b\n\
-	isync\n\
-2:"	: "=&r" (tmp)
+	bne-		1b\n"
+	PPC_ACQUIRE_BARRIER
+"2:"	: "=&r" (tmp)
 	: "r" (token), "r" (&rw->lock)
 	: "cr0", "memory");
 
@@ -270,7 +271,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 
 	__asm__ __volatile__(
 	"# read_unlock\n\t"
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx		%0,0,%1\n\
 	addic		%0,%0,-1\n"
 	PPC405_ERR77(0,%1)
@@ -284,7 +285,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 static inline void arch_write_unlock(arch_rwlock_t *rw)
 {
 	__asm__ __volatile__("# write_unlock\n\t"
-				LWSYNC_ON_SMP: : :"memory");
+				PPC_RELEASE_BARRIER: : :"memory");
 	rw->lock = 0;
 }
 
diff --git a/arch/powerpc/include/asm/synch.h b/arch/powerpc/include/asm/synch.h
index 28f6ddbff4cf..5db1f0d5ea82 100644
--- a/arch/powerpc/include/asm/synch.h
+++ b/arch/powerpc/include/asm/synch.h
@@ -37,11 +37,11 @@ static inline void isync(void)
 #endif
 
 #ifdef CONFIG_SMP
-#define ISYNC_ON_SMP	"\n\tisync\n"
-#define LWSYNC_ON_SMP	stringify_in_c(LWSYNC) "\n"
+#define PPC_ACQUIRE_BARRIER	"\n\tisync\n"
+#define PPC_RELEASE_BARRIER	stringify_in_c(LWSYNC) "\n"
 #else
-#define ISYNC_ON_SMP
-#define LWSYNC_ON_SMP
+#define PPC_ACQUIRE_BARRIER
+#define PPC_RELEASE_BARRIER
 #endif
 
 #endif /* __KERNEL__ */
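Since both barriers are plain string fragments, a quick user-space sketch shows how they splice into an asm template through C string-literal concatenation. The defines below are hypothetical stand-ins for an SMP build, not the kernel header; the real LWSYNC stringification depends on the CPU feature set and can fall back to a full sync.

#include <stdio.h>

/* Hypothetical stand-ins for the kernel macros on an SMP build. */
#define PPC_ACQUIRE_BARRIER	"\n\tisync\n"
#define PPC_RELEASE_BARRIER	"lwsync" "\n"

int main(void)
{
	/* Adjacent string literals concatenate at compile time, so the
	 * barriers land directly inside the assembler template. */
	const char *template =
		PPC_RELEASE_BARRIER
		"1:	lwarx	%0,0,%2\n"
		"	stwcx.	%3,0,%2\n"
		"	bne-	1b"
		PPC_ACQUIRE_BARRIER;

	puts(template);		/* print the template the compiler sees */
	return 0;
}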
diff --git a/arch/powerpc/include/asm/system.h b/arch/powerpc/include/asm/system.h
index bb8e006a47c6..aa59f5b794e8 100644
--- a/arch/powerpc/include/asm/system.h
+++ b/arch/powerpc/include/asm/system.h
@@ -232,12 +232,12 @@ __xchg_u32(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%3,0,%2 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -275,12 +275,12 @@ __xchg_u64(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stdcx.	%3,0,%2 \n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -366,14 +366,14 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
 	unsigned int prev;
 
 	__asm__ __volatile__ (
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
 	cmpw	0,%0,%3\n\
 	bne-	2f\n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%4,0,%2\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	"\n\
 2:"
 	: "=&r" (prev), "+m" (*p)
@@ -412,13 +412,13 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
 	unsigned long prev;
 
 	__asm__ __volatile__ (
-	LWSYNC_ON_SMP
+	PPC_RELEASE_BARRIER
 "1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
 	cmpd	0,%0,%3\n\
 	bne-	2f\n\
 	stdcx.	%4,0,%2\n\
 	bne-	1b"
-	ISYNC_ON_SMP
+	PPC_ACQUIRE_BARRIER
 	"\n\
 2:"
 	: "=&r" (prev), "+m" (*p)