author		Paul Mackerras <paulus@samba.org>	2005-11-19 04:50:46 -0500
committer	Paul Mackerras <paulus@samba.org>	2005-11-19 04:50:46 -0500
commit		0212ddd839470f7a54cccccbaecd4833b4123da2 (patch)
tree		3e18fc4852768c840131155eea84e2f70ebbbb07 /include
parent		21a6290220679d94912a068c75db2c5cd9c6552a (diff)
powerpc: Merge spinlock.h
The result is mostly similar to the original ppc64 version but with
some adaptations for 32-bit compilation.
include/asm-ppc64 is now empty!
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'include')
-rw-r--r--	include/asm-powerpc/spinlock.h (renamed from include/asm-ppc64/spinlock.h)	72
1 file changed, 50 insertions(+), 22 deletions(-)
diff --git a/include/asm-ppc64/spinlock.h b/include/asm-powerpc/spinlock.h
index 7d84fb5e39f1..caa4b14e0e94 100644
--- a/include/asm-ppc64/spinlock.h
+++ b/include/asm-powerpc/spinlock.h
@@ -18,31 +18,41 @@
  *
  * (the type definitions are in asm/spinlock_types.h)
  */
-#include <linux/config.h>
+#ifdef CONFIG_PPC64
 #include <asm/paca.h>
 #include <asm/hvcall.h>
 #include <asm/iseries/hv_call.h>
+#endif
+#include <asm/asm-compat.h>
+#include <asm/synch.h>
 
 #define __raw_spin_is_locked(x)		((x)->slock != 0)
 
+#ifdef CONFIG_PPC64
+/* use 0x800000yy when locked, where yy == CPU number */
+#define LOCK_TOKEN	(*(u32 *)(&get_paca()->lock_token))
+#else
+#define LOCK_TOKEN	1
+#endif
+
 /*
  * This returns the old value in the lock, so we succeeded
  * in getting the lock if the return value is 0.
  */
 static __inline__ unsigned long __spin_trylock(raw_spinlock_t *lock)
 {
-	unsigned long tmp, tmp2;
+	unsigned long tmp, token;
 
+	token = LOCK_TOKEN;
 	__asm__ __volatile__(
-"	lwz		%1,%3(13)		# __spin_trylock\n\
-1:	lwarx		%0,0,%2\n\
+"1:	lwarx		%0,0,%2		# __spin_trylock\n\
 	cmpwi		0,%0,0\n\
 	bne-		2f\n\
 	stwcx.		%1,0,%2\n\
 	bne-		1b\n\
 	isync\n\
-2:"	: "=&r" (tmp), "=&r" (tmp2)
-	: "r" (&lock->slock), "i" (offsetof(struct paca_struct, lock_token))
+2:"	: "=&r" (tmp)
+	: "r" (token), "r" (&lock->slock)
 	: "cr0", "memory");
 
 	return tmp;
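For readers who do not speak PowerPC assembly: the lwarx/stwcx. pair above is a load-reserve/store-conditional loop that stores LOCK_TOKEN into the lock word only if it is currently 0, and hands back the old value. A rough user-space C sketch of the same rule (illustrative only; the stdatomic names below are not part of the commit):

#include <stdatomic.h>

/* Sketch of the trylock rule: on 64-bit the token is 0x800000yy (yy = CPU
 * number, fetched from the paca); on 32-bit it is simply 1.  The function
 * returns the old lock value, so 0 means the lock was taken. */
static inline unsigned int sketch_spin_trylock(_Atomic unsigned int *slock,
					       unsigned int token)
{
	unsigned int old = 0;

	/* Stand-in for the lwarx/cmpwi/stwcx. loop; acquire ordering plays
	 * the role of the trailing isync. */
	if (atomic_compare_exchange_strong_explicit(slock, &old, token,
						    memory_order_acquire,
						    memory_order_relaxed))
		return 0;	/* old value was 0: lock acquired */
	return old;		/* non-zero token: someone else holds it */
}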
@@ -113,11 +123,17 @@ static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long
 
 static __inline__ void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	__asm__ __volatile__("lwsync	# __raw_spin_unlock": : :"memory");
+	__asm__ __volatile__(SYNC_ON_SMP"	# __raw_spin_unlock"
+			     : : :"memory");
 	lock->slock = 0;
 }
 
+#ifdef CONFIG_PPC64
 extern void __raw_spin_unlock_wait(raw_spinlock_t *lock);
+#else
+#define __raw_spin_unlock_wait(lock) \
+	do { while (__raw_spin_is_locked(lock)) cpu_relax(); } while (0)
+#endif
 
 /*
  * Read-write spinlocks, allowing multiple readers
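Unlock needs no atomic sequence: a barrier followed by a plain store of 0 is enough, because only the lock owner ever writes that value. SYNC_ON_SMP comes from the newly included asm/synch.h and is assumed here to emit the barrier only on SMP builds; the 32-bit __raw_spin_unlock_wait above simply polls the lock word. A minimal sketch of both, with the same stdatomic stand-ins as before:

#include <stdatomic.h>

static inline void sketch_spin_unlock(_Atomic unsigned int *slock)
{
	/* Release ordering stands in for the SYNC_ON_SMP barrier: stores
	 * made inside the critical section become visible before the
	 * lock word is cleared. */
	atomic_store_explicit(slock, 0, memory_order_release);
}

static inline void sketch_spin_unlock_wait(_Atomic unsigned int *slock)
{
	/* Equivalent of the 32-bit macro: busy-wait until unlocked. */
	while (atomic_load_explicit(slock, memory_order_acquire) != 0)
		;	/* the kernel version calls cpu_relax() here */
}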
@@ -133,6 +149,14 @@ extern void __raw_spin_unlock_wait(raw_spinlock_t *lock);
 #define __raw_read_can_lock(rw)		((rw)->lock >= 0)
 #define __raw_write_can_lock(rw)	(!(rw)->lock)
 
+#ifdef CONFIG_PPC64
+#define __DO_SIGN_EXTEND	"extsw	%0,%0\n"
+#define WRLOCK_TOKEN		LOCK_TOKEN	/* it's negative */
+#else
+#define __DO_SIGN_EXTEND
+#define WRLOCK_TOKEN		(-1)
+#endif
+
 /*
  * This returns the old value in the lock + 1,
  * so we got a read lock if the return value is > 0.
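The rwlock state is a single signed word: 0 means unlocked, a positive value is the reader count, and the negative WRLOCK_TOKEN marks a writer (hence __raw_read_can_lock tests >= 0 and __raw_write_can_lock tests for zero). On 64-bit, lwarx zero-extends the 32-bit lock word into a 64-bit register, so __DO_SIGN_EXTEND adds an extsw to keep the writer token negative; 32-bit registers need no extension and the token is plain -1. A hypothetical C rendering of the read-trylock rule described in the comment above:

#include <stdatomic.h>

/* Returns old value + 1, so > 0 means the read lock was taken. */
static inline long sketch_read_trylock(_Atomic int *lock)
{
	int old = atomic_load_explicit(lock, memory_order_relaxed);

	do {
		if (old < 0)		/* negative: write-locked */
			return old + 1;	/* <= 0 tells the caller to retry */
	} while (!atomic_compare_exchange_weak_explicit(lock, &old, old + 1,
							memory_order_acquire,
							memory_order_relaxed));
	return old + 1;			/* > 0: one more reader registered */
}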
@@ -142,11 +166,12 @@ static long __inline__ __read_trylock(raw_rwlock_t *rw)
 	long tmp;
 
 	__asm__ __volatile__(
-"1:	lwarx		%0,0,%1		# read_trylock\n\
-	extsw		%0,%0\n\
-	addic.		%0,%0,1\n\
-	ble-		2f\n\
-	stwcx.		%0,0,%1\n\
+"1:	lwarx		%0,0,%1		# read_trylock\n"
+	__DO_SIGN_EXTEND
+"	addic.		%0,%0,1\n\
+	ble-		2f\n"
+	PPC405_ERR77(0,%1)
+"	stwcx.		%0,0,%1\n\
 	bne-		1b\n\
 	isync\n\
 2:"	: "=&r" (tmp)
@@ -162,18 +187,19 @@ static long __inline__ __read_trylock(raw_rwlock_t *rw)
  */
 static __inline__ long __write_trylock(raw_rwlock_t *rw)
 {
-	long tmp, tmp2;
+	long tmp, token;
 
+	token = WRLOCK_TOKEN;
 	__asm__ __volatile__(
-"	lwz		%1,%3(13)	# write_trylock\n\
-1:	lwarx		%0,0,%2\n\
+"1:	lwarx		%0,0,%2	# write_trylock\n\
 	cmpwi		0,%0,0\n\
-	bne-		2f\n\
-	stwcx.		%1,0,%2\n\
+	bne-		2f\n"
+	PPC405_ERR77(0,%1)
+"	stwcx.		%1,0,%2\n\
 	bne-		1b\n\
 	isync\n\
-2:"	: "=&r" (tmp), "=&r" (tmp2)
-	: "r" (&rw->lock), "i" (offsetof(struct paca_struct, lock_token))
+2:"	: "=&r" (tmp)
+	: "r" (token), "r" (&rw->lock)
 	: "cr0", "memory");
 
 	return tmp;
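__write_trylock keeps its original contract (it returns the old lock value, so 0 means success) but now takes the precomputed WRLOCK_TOKEN from a register instead of loading it from the paca via %3(13). The PPC405_ERR77 macro from asm/asm-compat.h is assumed to insert the 405 erratum #77 workaround before stwcx. on affected 32-bit CPUs and to expand to nothing elsewhere. Callers spin, calling cpu_relax() between attempts, until the trylock reports 0. Sketch of the writer-side rule, with the same caveats as the earlier sketches:

#include <stdatomic.h>

/* A writer may only claim the lock when the word is 0, and it stores the
 * negative token so concurrent readers see the word as < 0. */
static inline long sketch_write_trylock(_Atomic int *lock, int wrlock_token)
{
	int old = 0;

	if (atomic_compare_exchange_strong_explicit(lock, &old, wrlock_token,
						    memory_order_acquire,
						    memory_order_relaxed))
		return 0;	/* lock was free: write lock acquired */
	return old;		/* busy: positive readers or another writer */
}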
@@ -224,8 +250,9 @@ static void __inline__ __raw_read_unlock(raw_rwlock_t *rw)
 	__asm__ __volatile__(
 	"eieio			# read_unlock\n\
 1:	lwarx		%0,0,%1\n\
-	addic		%0,%0,-1\n\
-	stwcx.		%0,0,%1\n\
+	addic		%0,%0,-1\n"
+	PPC405_ERR77(0,%1)
+"	stwcx.		%0,0,%1\n\
 	bne-		1b"
 	: "=&r"(tmp)
 	: "r"(&rw->lock)
@@ -234,7 +261,8 @@ static void __inline__ __raw_read_unlock(raw_rwlock_t *rw)
 
 static __inline__ void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	__asm__ __volatile__("lwsync	# write_unlock": : :"memory");
+	__asm__ __volatile__(SYNC_ON_SMP"	# write_unlock"
+			     : : :"memory");
 	rw->lock = 0;
 }
 