author	Anton Blanchard <anton@samba.org>	2006-01-12 23:37:17 -0500
committer	Paul Mackerras <paulus@samba.org>	2006-01-13 05:18:50 -0500
commit	144b9c135b963bcb7f242c7b83bff930620d3161 (patch)
tree	4b454f3e5e5921c5a528131dfa51df542259d918 /include/asm-powerpc/system.h
parent	3356bb9f7ba378a6e2709f9df95f4ea52111f4df (diff)
[PATCH] powerpc: use lwsync in atomics, bitops, lock functions
eieio provides only store-store ordering. When it is used to order an unlock operation, loads may leak out of the critical region. This is potentially buggy; one example is a user who wants to atomically read a couple of values. We can solve this with lwsync, which orders everything except store-load.

I removed the (now unused) EIEIO_ON_SMP macros and the C versions isync_on_smp and eieio_on_smp, since we no longer use them. I also removed some old comments that were used to identify inline spinlocks in assembly; they no longer make sense now that our locks are out of line. Another interesting find was that read_unlock was using an eieio even though the rest of the spinlock code had already been converted to use lwsync.

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Paul Mackerras <paulus@samba.org>
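To make the ordering problem concrete, here is a minimal sketch of a release-style unlock on powerpc; it is not part of the patch, and the example_unlock helper is hypothetical. Because eieio orders only prior stores against the store that clears the lock word, a load inside the critical section could be reordered past the unlock; lwsync additionally orders prior loads, closing that window:

/* Hypothetical illustration, not from system.h: a release-style
 * unlock.  lwsync orders all prior loads and stores against the
 * store that clears the lock word; eieio would order only the
 * prior stores, letting loads leak past the unlock. */
static inline void example_unlock(volatile unsigned int *lock)
{
	__asm__ __volatile__("lwsync" : : : "memory");	/* release barrier */
	*lock = 0;					/* drop the lock */
}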
Diffstat (limited to 'include/asm-powerpc/system.h')
-rw-r--r--	include/asm-powerpc/system.h	8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/include/asm-powerpc/system.h b/include/asm-powerpc/system.h
index 9b822afa7d0e..d9bf53653b10 100644
--- a/include/asm-powerpc/system.h
+++ b/include/asm-powerpc/system.h
@@ -212,7 +212,7 @@ __xchg_u32(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%3,0,%2 \n\
@@ -232,7 +232,7 @@ __xchg_u64(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stdcx.	%3,0,%2 \n\
@@ -287,7 +287,7 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
 	unsigned int prev;
 
 	__asm__ __volatile__ (
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
 	cmpw	0,%0,%3\n\
 	bne-	2f\n"
@@ -311,7 +311,7 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
 	unsigned long prev;
 
 	__asm__ __volatile__ (
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
 	cmpd	0,%0,%3\n\
 	bne-	2f\n\