aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAnton Blanchard <anton@samba.org>2012-02-29 16:12:16 -0500
committerBenjamin Herrenschmidt <benh@kernel.crashing.org>2012-03-07 01:06:08 -0500
commita6cf7ed5119fb22f54584a9f867b638edd3c4384 (patch)
tree5a81f3e6cf92caa6e806e2a5b945406902e63e07
parentb1ada6010e234d6c53bfcd5a617610c9284846b8 (diff)
powerpc/atomic: Implement atomic*_inc_not_zero
Implement atomic_inc_not_zero and atomic64_inc_not_zero. At the moment we use atomic*_add_unless which requires us to put 0 and 1 constants into registers. We can also avoid a subtract by saving the original value in a second temporary. This removes 3 instructions from fget: - c0000000001b63c0: 39 00 00 00 li r8,0 - c0000000001b63c4: 39 40 00 01 li r10,1 ... - c0000000001b63e8: 7c 0a 00 50 subf r0,r10,r0 Signed-off-by: Anton Blanchard <anton@samba.org> Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
-rw-r--r--arch/powerpc/include/asm/atomic.h59
1 file changed, 58 insertions, 1 deletion
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 02e41b53488d..14174e838ad9 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -212,6 +212,36 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
212 return t; 212 return t;
213} 213}
214 214
215/**
216 * atomic_inc_not_zero - increment unless the number is zero
217 * @v: pointer of type atomic_t
218 *
219 * Atomically increments @v by 1, so long as @v is non-zero.
220 * Returns non-zero if @v was non-zero, and zero otherwise.
221 */
222static __inline__ int atomic_inc_not_zero(atomic_t *v)
223{
224 int t1, t2;
225
226 __asm__ __volatile__ (
227 PPC_ATOMIC_ENTRY_BARRIER
228"1: lwarx %0,0,%2 # atomic_inc_not_zero\n\
229 cmpwi 0,%0,0\n\
230 beq- 2f\n\
231 addic %1,%0,1\n"
232 PPC405_ERR77(0,%2)
233" stwcx. %1,0,%2\n\
234 bne- 1b\n"
235 PPC_ATOMIC_EXIT_BARRIER
236 "\n\
2372:"
238 : "=&r" (t1), "=&r" (t2)
239 : "r" (&v->counter)
240 : "cc", "xer", "memory");
241
242 return t1;
243}
244#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
215 245
216#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0) 246#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
217#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0) 247#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
@@ -467,7 +497,34 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
467 return t != u; 497 return t != u;
468} 498}
469 499
470#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0) 500/**
501 * atomic64_inc_not_zero - increment unless the number is zero
502 * @v: pointer of type atomic64_t
503 *
504 * Atomically increments @v by 1, so long as @v is non-zero.
505 * Returns non-zero if @v was non-zero, and zero otherwise.
506 */
507static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
508{
509 long t1, t2;
510
511 __asm__ __volatile__ (
512 PPC_ATOMIC_ENTRY_BARRIER
513"1: ldarx %0,0,%2 # atomic64_inc_not_zero\n\
514 cmpdi 0,%0,0\n\
515 beq- 2f\n\
516 addic %1,%0,1\n\
517 stdcx. %1,0,%2\n\
518 bne- 1b\n"
519 PPC_ATOMIC_EXIT_BARRIER
520 "\n\
5212:"
522 : "=&r" (t1), "=&r" (t2)
523 : "r" (&v->counter)
524 : "cc", "xer", "memory");
525
526 return t1;
527}
471 528
472#endif /* __powerpc64__ */ 529#endif /* __powerpc64__ */
473 530