Diffstat (limited to 'arch/sh/include/asm/atomic.h')
-rw-r--r--  arch/sh/include/asm/atomic.h | 73
1 file changed, 29 insertions(+), 44 deletions(-)
diff --git a/arch/sh/include/asm/atomic.h b/arch/sh/include/asm/atomic.h
index b16388d71954..275a448ae8c2 100644
--- a/arch/sh/include/asm/atomic.h
+++ b/arch/sh/include/asm/atomic.h
@@ -25,58 +25,43 @@
 #endif
 
 #define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
+#define atomic_dec_return(v)		atomic_sub_return(1, (v))
+#define atomic_inc_return(v)		atomic_add_return(1, (v))
+#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
+#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
+#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
+#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
 
-#define atomic_dec_return(v)		atomic_sub_return(1,(v))
-#define atomic_inc_return(v)		atomic_add_return(1,(v))
+#define atomic_inc(v)			atomic_add(1, (v))
+#define atomic_dec(v)			atomic_sub(1, (v))
 
-/*
- * atomic_inc_and_test - increment and test
+#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
+#define atomic_cmpxchg(v, o, n)		(cmpxchg(&((v)->counter), (o), (n)))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
  *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-
-#define atomic_inc(v) atomic_add(1,(v))
-#define atomic_dec(v) atomic_sub(1,(v))
-
-#if !defined(CONFIG_GUSA_RB) && !defined(CONFIG_CPU_SH4A)
-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
-{
-	int ret;
-	unsigned long flags;
-
-	local_irq_save(flags);
-	ret = v->counter;
-	if (likely(ret == old))
-		v->counter = new;
-	local_irq_restore(flags);
-
-	return ret;
-}
-
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
-	int ret;
-	unsigned long flags;
-
-	local_irq_save(flags);
-	ret = v->counter;
-	if (ret != u)
-		v->counter += a;
-	local_irq_restore(flags);
-
-	return ret != u;
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+
+	return c != (u);
 }
-#endif /* !CONFIG_GUSA_RB && !CONFIG_CPU_SH4A */
-
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define smp_mb__before_atomic_dec()	smp_mb()
 #define smp_mb__after_atomic_dec()	smp_mb()
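
The replacement atomic_add_unless() is the standard compare-and-swap retry loop: read the counter, give up if it already equals @u, otherwise attempt a cmpxchg() from the observed value to the incremented one, and retry from whatever value the cmpxchg() returned if another CPU raced in between. Since the patch now defines atomic_cmpxchg() unconditionally on top of cmpxchg(), the old local_irq_save()-based fallbacks can go away. Below is a minimal userspace sketch of the same retry idiom, written against C11 <stdatomic.h> rather than the kernel's atomic_t/cmpxchg(); the add_unless() name and the demo main() are illustrative assumptions, not part of this patch.

#include <stdatomic.h>
#include <stdio.h>

/* Add @a to @v unless @v is @u; returns nonzero iff the add happened.
 * Userspace analogue of the kernel loop above, for illustration only. */
static int add_unless(atomic_int *v, int a, int u)
{
	int c = atomic_load(v);

	for (;;) {
		if (c == u)
			break;
		/*
		 * Try to swap c for c + a.  On failure (including a
		 * spurious one, since this is the _weak variant), C11
		 * writes the currently stored value back into c, so
		 * the next iteration retries from the fresh value.
		 */
		if (atomic_compare_exchange_weak(v, &c, c + a))
			break;
	}

	return c != u;	/* c still holds the pre-add value here */
}

int main(void)
{
	atomic_int v = 1;

	printf("%d\n", add_unless(&v, 1, 0));	/* 1: 1 != 0, v becomes 2 */
	printf("%d\n", add_unless(&v, 1, 2));	/* 0: v == 2, left untouched */
	printf("%d\n", atomic_load(&v));	/* 2 */
	return 0;
}

As in the kernel version, the loop exits either because the counter hit @u (return 0) or because our exchange won the race (return non-zero); in both cases the result is computed from the value observed before the add.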