 include/asm-powerpc/atomic.h   | 20 ++++++++++----------
 include/asm-powerpc/bitops.h   |  6 +++---
 include/asm-powerpc/futex.h    |  2 +-
 include/asm-powerpc/spinlock.h | 19 ++++++++++---------
 include/asm-powerpc/synch.h    | 23 ++++-------------------
 include/asm-powerpc/system.h   |  8 ++++----
 6 files changed, 32 insertions(+), 46 deletions(-)
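
This is the acquire side of the change: every larx/stcx. sequence that previously began with EIEIO_ON_SMP (or SYNC_ON_SMP in the futex and unlock paths) now begins with LWSYNC_ON_SMP, which emits lwsync where the CPU has it, a full sync on 32-bit parts that lack it, and nothing on UP builds. A minimal sketch of the resulting pattern, assuming a 64-bit SMP build; demo_add_return is an illustrative name, not a kernel symbol, and the trailing isync (ISYNC_ON_SMP in the kernel) sits outside the three-line hunk context shown below:

	/* Acquire-style atomic read-modify-write, as standardized by this
	 * patch (illustrative sketch, powerpc64 SMP assumed). */
	static inline int demo_add_return(int a, volatile int *v)
	{
		int t;

		__asm__ __volatile__(
	"	lwsync\n"		/* what LWSYNC_ON_SMP expands to here */
	"1:	lwarx	%0,0,%2\n"	/* load word and set reservation */
	"	add	%0,%1,%0\n"
	"	stwcx.	%0,0,%2\n"	/* store iff reservation still held */
	"	bne-	1b\n"		/* reservation lost: retry */
	"	isync\n"		/* what ISYNC_ON_SMP expands to */
		: "=&r" (t)
		: "r" (a), "r" (v)
		: "cc", "memory");
		return t;
	}

lwsync orders load-load, load-store, and store-store, but not store-load; that weaker guarantee is sufficient for these acquire/release uses and is cheaper than a full sync.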
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index 248f9aec959c..9ce51ba54c13 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -36,7 +36,7 @@ static __inline__ int atomic_add_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# atomic_add_return\n\
 	add	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
@@ -72,7 +72,7 @@ static __inline__ int atomic_sub_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# atomic_sub_return\n\
 	subf	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
@@ -106,7 +106,7 @@ static __inline__ int atomic_inc_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_inc_return\n\
 	addic	%0,%0,1\n"
 	PPC405_ERR77(0,%1)
@@ -150,7 +150,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_dec_return\n\
 	addic	%0,%0,-1\n"
 	PPC405_ERR77(0,%1)
@@ -204,7 +204,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
 	addic.	%0,%0,-1\n\
 	blt-	2f\n"
@@ -253,7 +253,7 @@ static __inline__ long atomic64_add_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# atomic64_add_return\n\
 	add	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
@@ -287,7 +287,7 @@ static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
 	subf	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
@@ -319,7 +319,7 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
 	addic	%0,%0,1\n\
 	stdcx.	%0,0,%1 \n\
@@ -361,7 +361,7 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
 	addic	%0,%0,-1\n\
 	stdcx.	%0,0,%1\n\
@@ -386,7 +386,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
 	addic.	%0,%0,-1\n\
 	blt-	2f\n\
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index 1996eaa8aeae..bf6941a810b8 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -112,7 +112,7 @@ static __inline__ int test_and_set_bit(unsigned long nr,
 	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:"	PPC_LLARX "%0,0,%3		# test_and_set_bit\n"
 	"or	%1,%0,%2 \n"
 	PPC405_ERR77(0,%3)
@@ -134,7 +134,7 @@ static __inline__ int test_and_clear_bit(unsigned long nr,
 	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:"	PPC_LLARX "%0,0,%3		# test_and_clear_bit\n"
 	"andc	%1,%0,%2 \n"
 	PPC405_ERR77(0,%3)
@@ -156,7 +156,7 @@ static __inline__ int test_and_change_bit(unsigned long nr,
 	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:"	PPC_LLARX "%0,0,%3		# test_and_change_bit\n"
 	"xor	%1,%0,%2 \n"
 	PPC405_ERR77(0,%3)
diff --git a/include/asm-powerpc/futex.h b/include/asm-powerpc/futex.h
index f0319d50b129..39e85f320a76 100644
--- a/include/asm-powerpc/futex.h
+++ b/include/asm-powerpc/futex.h
@@ -11,7 +11,7 @@
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
   __asm__ __volatile ( \
-	SYNC_ON_SMP \
+	LWSYNC_ON_SMP \
 "1:	lwarx	%0,0,%2\n" \
 	insn \
 	PPC405_ERR77(0, %2) \
diff --git a/include/asm-powerpc/spinlock.h b/include/asm-powerpc/spinlock.h
index 26b8744ed529..895cb6d3a42a 100644
--- a/include/asm-powerpc/spinlock.h
+++ b/include/asm-powerpc/spinlock.h
@@ -46,7 +46,7 @@ static __inline__ unsigned long __spin_trylock(raw_spinlock_t *lock)
 
 	token = LOCK_TOKEN;
 	__asm__ __volatile__(
-"1:	lwarx		%0,0,%2		# __spin_trylock\n\
+"1:	lwarx		%0,0,%2\n\
 	cmpwi		0,%0,0\n\
 	bne-		2f\n\
 	stwcx.		%1,0,%2\n\
@@ -124,8 +124,8 @@ static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long
 
 static __inline__ void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	__asm__ __volatile__(SYNC_ON_SMP"	# __raw_spin_unlock"
-			: : :"memory");
+	__asm__ __volatile__("# __raw_spin_unlock\n\t"
+				LWSYNC_ON_SMP: : :"memory");
 	lock->slock = 0;
 }
 
@@ -167,7 +167,7 @@ static long __inline__ __read_trylock(raw_rwlock_t *rw)
 	long tmp;
 
 	__asm__ __volatile__(
-"1:	lwarx		%0,0,%1		# read_trylock\n"
+"1:	lwarx		%0,0,%1\n"
 	__DO_SIGN_EXTEND
 "	addic.		%0,%0,1\n\
 	ble-		2f\n"
@@ -192,7 +192,7 @@ static __inline__ long __write_trylock(raw_rwlock_t *rw)
 
 	token = WRLOCK_TOKEN;
 	__asm__ __volatile__(
-"1:	lwarx		%0,0,%2	# write_trylock\n\
+"1:	lwarx		%0,0,%2\n\
 	cmpwi		0,%0,0\n\
 	bne-		2f\n"
 	PPC405_ERR77(0,%1)
@@ -249,8 +249,9 @@ static void __inline__ __raw_read_unlock(raw_rwlock_t *rw)
 	long tmp;
 
 	__asm__ __volatile__(
-	"eieio		# read_unlock\n\
-1:	lwarx		%0,0,%1\n\
+	"# read_unlock\n\t"
+	LWSYNC_ON_SMP
+"1:	lwarx		%0,0,%1\n\
 	addic		%0,%0,-1\n"
 	PPC405_ERR77(0,%1)
 "	stwcx.		%0,0,%1\n\
@@ -262,8 +263,8 @@ static void __inline__ __raw_read_unlock(raw_rwlock_t *rw)
 
 static __inline__ void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	__asm__ __volatile__(SYNC_ON_SMP"	# write_unlock"
-			: : :"memory");
+	__asm__ __volatile__("# write_unlock\n\t"
+				LWSYNC_ON_SMP: : :"memory");
 	rw->lock = 0;
 }
 
diff --git a/include/asm-powerpc/synch.h b/include/asm-powerpc/synch.h
index 794870ab8fd3..c90d9d9aae72 100644
--- a/include/asm-powerpc/synch.h
+++ b/include/asm-powerpc/synch.h
@@ -2,6 +2,8 @@
 #define _ASM_POWERPC_SYNCH_H
 #ifdef __KERNEL__
 
+#include <linux/stringify.h>
+
 #ifdef __powerpc64__
 #define __SUBARCH_HAS_LWSYNC
 #endif
@@ -12,20 +14,12 @@
 #    define LWSYNC	sync
 #endif
 
-
-/*
- * Arguably the bitops and *xchg operations don't imply any memory barrier
- * or SMP ordering, but in fact a lot of drivers expect them to imply
- * both, since they do on x86 cpus.
- */
 #ifdef CONFIG_SMP
-#define EIEIO_ON_SMP	"eieio\n"
 #define ISYNC_ON_SMP	"\n\tisync"
-#define SYNC_ON_SMP	__stringify(LWSYNC) "\n"
+#define LWSYNC_ON_SMP	__stringify(LWSYNC) "\n"
 #else
-#define EIEIO_ON_SMP
 #define ISYNC_ON_SMP
+#define LWSYNC_ON_SMP
 #endif
 
 static inline void eieio(void)
@@ -38,14 +32,5 @@ static inline void isync(void)
 	__asm__ __volatile__ ("isync" : : : "memory");
 }
 
-#ifdef CONFIG_SMP
-#define eieio_on_smp()	eieio()
-#define isync_on_smp()	isync()
-#else
-#define eieio_on_smp()	__asm__ __volatile__("": : :"memory")
-#define isync_on_smp()	__asm__ __volatile__("": : :"memory")
-#endif
-
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_SYNCH_H */
-
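
For reference, the surviving macro logic of this header, re-derived so it compiles outside the kernel; the DEMO_* names are mine, and <linux/stringify.h> (newly included above) is what supplies __stringify in the real header:

	/* How LWSYNC_ON_SMP resolves per configuration (sketch). */
	#define DEMO_STRINGIFY_1(x)	#x
	#define DEMO_STRINGIFY(x)	DEMO_STRINGIFY_1(x)

	#ifdef __powerpc64__			/* __SUBARCH_HAS_LWSYNC */
	#define DEMO_LWSYNC	lwsync
	#else
	#define DEMO_LWSYNC	sync		/* no lwsync: full sync */
	#endif

	#ifdef CONFIG_SMP
	#define DEMO_LWSYNC_ON_SMP	DEMO_STRINGIFY(DEMO_LWSYNC) "\n"
	#else
	#define DEMO_LWSYNC_ON_SMP	""	/* UP: no barrier (the kernel leaves it empty) */
	#endif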
diff --git a/include/asm-powerpc/system.h b/include/asm-powerpc/system.h
index 9b822afa7d0e..d9bf53653b10 100644
--- a/include/asm-powerpc/system.h
+++ b/include/asm-powerpc/system.h
@@ -212,7 +212,7 @@ __xchg_u32(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%3,0,%2 \n\
@@ -232,7 +232,7 @@ __xchg_u64(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stdcx.	%3,0,%2 \n\
@@ -287,7 +287,7 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
 	unsigned int prev;
 
 	__asm__ __volatile__ (
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
 	cmpw	0,%0,%3\n\
 	bne-	2f\n"
@@ -311,7 +311,7 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
 	unsigned long prev;
 
 	__asm__ __volatile__ (
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
 	cmpd	0,%0,%3\n\
 	bne-	2f\n\