Diffstat (limited to 'include/asm-powerpc/atomic.h')

-rw-r--r--	include/asm-powerpc/atomic.h	215

1 file changed, 206 insertions(+), 9 deletions(-)
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ed4b345ed75d..ec4b14468959 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -9,21 +9,13 @@ typedef struct { volatile int counter; } atomic_t;
 
 #ifdef __KERNEL__
 #include <asm/synch.h>
+#include <asm/asm-compat.h>
 
 #define ATOMIC_INIT(i)		{ (i) }
 
 #define atomic_read(v)		((v)->counter)
 #define atomic_set(v,i)		(((v)->counter) = (i))
 
-/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
- * The old ATOMIC_SYNC_FIX covered some but not all of this.
- */
-#ifdef CONFIG_IBM405_ERR77
-#define PPC405_ERR77(ra,rb)	"dcbt " #ra "," #rb ";"
-#else
-#define PPC405_ERR77(ra,rb)
-#endif
-
 static __inline__ void atomic_add(int a, atomic_t *v)
 {
 	int t;
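
The erratum workaround removed above is relocated, not dropped: PPC405_ERR77 now comes from <asm/asm-compat.h> (hence the new include), so every stwcx. user shares one definition. For context, here is the shape of the existing 32-bit atomic_add in this file, which consumes the macro; this is a sketch reconstructed from the surrounding code, so treat the exact formatting as illustrative:

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# load word, take reservation\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)	/* expands to a dcbt on 405 cores, else empty */
"	stwcx.	%0,0,%3 \n\
	bne-	1b"		/* reservation lost: retry */
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}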
@@ -172,6 +164,33 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	return t;
 }
 
+#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)			\
+({							\
+	int c, old;					\
+	c = atomic_read(v);				\
+	for (;;) {					\
+		if (unlikely(c == (u)))			\
+			break;				\
+		old = atomic_cmpxchg((v), c, c + (a));	\
+		if (likely(old == c))			\
+			break;				\
+		c = old;				\
+	}						\
+	c != (u);					\
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
 #define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
 
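
The new atomic_add_unless is a cmpxchg retry loop: re-read the counter until either it equals @u (give up) or the compare-and-swap lands, then evaluate to non-zero on success, courtesy of the GCC statement-expression ({ ... }). Its standard consumer is atomic_inc_not_zero, the "take a reference only while the object is still live" idiom. A minimal usage sketch; the struct and helper names are illustrative, not part of this patch:

struct obj {
	atomic_t refcount;	/* 0 means teardown has already begun */
};

/*
 * Try to take a reference.  Returns non-zero on success, zero if the
 * count had already dropped to 0, in which case the caller must not
 * touch the object.
 */
static inline int obj_get(struct obj *o)
{
	return atomic_inc_not_zero(&o->refcount);
}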
@@ -205,5 +224,183 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 #define smp_mb__before_atomic_inc()	smp_mb()
 #define smp_mb__after_atomic_inc()	smp_mb()
 
+#ifdef __powerpc64__
+
+typedef struct { volatile long counter; } atomic64_t;
+
+#define ATOMIC64_INIT(i)	{ (i) }
+
+#define atomic64_read(v)	((v)->counter)
+#define atomic64_set(v,i)	(((v)->counter) = (i))
+
+static __inline__ void atomic64_add(long a, atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+"1:	ldarx	%0,0,%3		# atomic64_add\n\
+	add	%0,%2,%0\n\
+	stdcx.	%0,0,%3 \n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ long atomic64_add_return(long a, atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
+	add	%0,%1,%0\n\
+	stdcx.	%0,0,%2 \n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (a), "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
+
+static __inline__ void atomic64_sub(long a, atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+"1:	ldarx	%0,0,%3		# atomic64_sub\n\
+	subf	%0,%2,%0\n\
+	stdcx.	%0,0,%3 \n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
+	subf	%0,%1,%0\n\
+	stdcx.	%0,0,%2 \n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (a), "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+static __inline__ void atomic64_inc(atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+"1:	ldarx	%0,0,%2		# atomic64_inc\n\
+	addic	%0,%0,1\n\
+	stdcx.	%0,0,%2 \n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ long atomic64_inc_return(atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
+	addic	%0,%0,1\n\
+	stdcx.	%0,0,%1 \n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+/*
+ * atomic64_inc_and_test - increment and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
+
+static __inline__ void atomic64_dec(atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+"1:	ldarx	%0,0,%2		# atomic64_dec\n\
+	addic	%0,%0,-1\n\
+	stdcx.	%0,0,%2\n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ long atomic64_dec_return(atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
+	addic	%0,%0,-1\n\
+	stdcx.	%0,0,%1\n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
+#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
+
+/*
+ * Atomically test *v and decrement if it is greater than 0.
+ * The function returns the old value of *v minus 1.
+ */
+static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
+	addic.	%0,%0,-1\n\
+	blt-	2f\n\
+	stdcx.	%0,0,%1\n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	"\n\
+2:"	: "=&r" (t)
+	: "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+#endif /* __powerpc64__ */
+
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */
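
With these primitives in place, ppc64 code can keep full-width counters lock-free instead of wrapping a long in a spinlock. Note the barrier split: the void operations (atomic64_add, atomic64_inc, atomic64_dec) are bare ldarx/stdcx. loops, while the value-returning variants bracket the loop with EIEIO_ON_SMP/ISYNC_ON_SMP so the result is ordered against surrounding accesses. A hypothetical usage sketch, with all names ours:

#ifdef __powerpc64__

static atomic64_t bytes_done = ATOMIC64_INIT(0);
static atomic64_t inflight = ATOMIC64_INIT(0);

static void io_start(long nbytes)
{
	atomic64_inc(&inflight);		/* unordered, no return value */
	atomic64_add(nbytes, &bytes_done);
}

static int io_finish(void)
{
	/* Value-returning, so it carries the SMP barriers: the zero
	 * test is ordered against this CPU's earlier stores.
	 */
	return atomic64_dec_and_test(&inflight);
}

#endif /* __powerpc64__ */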