path: root/arch/h8300/include
author    Peter Zijlstra <peterz@infradead.org>    2016-04-17 19:16:08 -0400
committer Ingo Molnar <mingo@kernel.org>           2016-06-16 04:48:24 -0400
commit    0c074cbc33091dd69fe70ec27474d228c3184860 (patch)
tree      24f529cc54a3133c84aa96d3d325cb983c89cdf4 /arch/h8300/include
parent    d9c730281617e55ca470e66f8e9d7d3f5f420fec (diff)
locking/atomic, arch/h8300: Implement atomic_fetch_{add,sub,and,or,xor}()
Implement FETCH-OP atomic primitives. These are very similar to the existing
OP-RETURN primitives we already have, except they return the value of the
atomic variable _before_ modification.

This is especially useful for irreversible operations -- such as bitops
(because it becomes impossible to reconstruct the state prior to
modification).

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Yoshinori Sato <ysato@users.sourceforge.jp>
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Cc: uclinux-h8-devel@lists.sourceforge.jp
Signed-off-by: Ingo Molnar <mingo@kernel.org>
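To make the before/after distinction concrete, here is a minimal illustrative
sketch (not part of the patch; the helper name and mask parameter are made up)
of why the pre-modification value matters for bitops: once the AND has been
applied, the old bit state cannot be reconstructed from the new value, but
atomic_fetch_and() hands it back directly.

#include <linux/atomic.h>
#include <linux/types.h>

static atomic_t flag_word = ATOMIC_INIT(0);

/*
 * Hypothetical helper: atomically clear 'mask' and report whether any of
 * those bits were set beforehand.  Only the pre-modification value returned
 * by atomic_fetch_and() makes this possible; the post-AND value has already
 * lost that information.
 */
static bool fetch_and_clear_bits(int mask)
{
        int old = atomic_fetch_and(~mask, &flag_word);

        return (old & mask) != 0;
}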
Diffstat (limited to 'arch/h8300/include')
-rw-r--r--    arch/h8300/include/asm/atomic.h    31
1 file changed, 25 insertions, 6 deletions
diff --git a/arch/h8300/include/asm/atomic.h b/arch/h8300/include/asm/atomic.h
index 4435a445ae7e..0961b618bdde 100644
--- a/arch/h8300/include/asm/atomic.h
+++ b/arch/h8300/include/asm/atomic.h
@@ -28,6 +28,19 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 	return ret; \
 }
 
+#define ATOMIC_FETCH_OP(op, c_op) \
+static inline int atomic_fetch_##op(int i, atomic_t *v) \
+{ \
+	h8300flags flags; \
+	int ret; \
+	\
+	flags = arch_local_irq_save(); \
+	ret = v->counter; \
+	v->counter c_op i; \
+	arch_local_irq_restore(flags); \
+	return ret; \
+}
+
 #define ATOMIC_OP(op, c_op) \
 static inline void atomic_##op(int i, atomic_t *v) \
 { \
@@ -41,17 +54,23 @@ static inline void atomic_##op(int i, atomic_t *v) \
 ATOMIC_OP_RETURN(add, +=)
 ATOMIC_OP_RETURN(sub, -=)
 
-ATOMIC_OP(and, &=)
-ATOMIC_OP(or, |=)
-ATOMIC_OP(xor, ^=)
+#define atomic_fetch_or atomic_fetch_or
 
+#define ATOMIC_OPS(op, c_op) \
+	ATOMIC_OP(op, c_op) \
+	ATOMIC_FETCH_OP(op, c_op)
+
+ATOMIC_OPS(and, &=)
+ATOMIC_OPS(or, |=)
+ATOMIC_OPS(xor, ^=)
+ATOMIC_OPS(add, +=)
+ATOMIC_OPS(sub, -=)
+
+#undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define atomic_add(i, v)		(void)atomic_add_return(i, v)
 #define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
-
-#define atomic_sub(i, v)		(void)atomic_sub_return(i, v)
 #define atomic_sub_and_test(i, v)	(atomic_sub_return(i, v) == 0)
 
 #define atomic_inc_return(v)		atomic_add_return(1, v)
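
For reference, a hand-expanded sketch (not part of the patch) of the
atomic_fetch_and() that ATOMIC_OPS(and, &=) now generates through
ATOMIC_FETCH_OP(); like the other operations in this header it serializes the
plain C read-modify-write by disabling interrupts.

/* Expansion of ATOMIC_FETCH_OP(and, &=), written out by hand for clarity. */
static inline int atomic_fetch_and(int i, atomic_t *v)
{
        h8300flags flags;
        int ret;

        flags = arch_local_irq_save();  /* block interrupts around the RMW,
                                           as the other ops in this header do */
        ret = v->counter;               /* snapshot the value *before* the op */
        v->counter &= i;
        arch_local_irq_restore(flags);
        return ret;
}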