diff options
author | Peter Zijlstra <peterz@infradead.org> | 2014-04-23 10:12:30 -0400 |
---|---|---|
committer | Ingo Molnar <mingo@kernel.org> | 2014-08-14 06:48:14 -0400 |
commit | 560cb12a4080a48b84da8b96878cafbd193c4d64 (patch) | |
tree | e0b28be89d66e8a01b164b7c6123e918cafcc79c /lib | |
parent | d4608dd5b4ec13855680b89f719d8d4b2da92411 (diff) |
locking,arch: Rewrite generic atomic support
Rewrite generic atomic support to only require cmpxchg(), generate all
other primitives from that.
Furthermore, reduce the endless repetition for all these primitives to
a few CPP macros. This way we get more for fewer lines.
Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Link: http://lkml.kernel.org/r/20140508135852.940119622@infradead.org
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: David Howells <dhowells@redhat.com>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: David S. Miller <davem@davemloft.net>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-arch@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'lib')
-rw-r--r-- | lib/atomic64.c | 83 |
1 file changed, 36 insertions, 47 deletions
diff --git a/lib/atomic64.c b/lib/atomic64.c index 08a4f068e61e..1298c05ef528 100644 --- a/lib/atomic64.c +++ b/lib/atomic64.c | |||
@@ -70,53 +70,42 @@ void atomic64_set(atomic64_t *v, long long i) | |||
70 | } | 70 | } |
71 | EXPORT_SYMBOL(atomic64_set); | 71 | EXPORT_SYMBOL(atomic64_set); |
72 | 72 | ||
73 | void atomic64_add(long long a, atomic64_t *v) | 73 | #define ATOMIC64_OP(op, c_op) \ |
74 | { | 74 | void atomic64_##op(long long a, atomic64_t *v) \ |
75 | unsigned long flags; | 75 | { \ |
76 | raw_spinlock_t *lock = lock_addr(v); | 76 | unsigned long flags; \ |
77 | 77 | raw_spinlock_t *lock = lock_addr(v); \ | |
78 | raw_spin_lock_irqsave(lock, flags); | 78 | \ |
79 | v->counter += a; | 79 | raw_spin_lock_irqsave(lock, flags); \ |
80 | raw_spin_unlock_irqrestore(lock, flags); | 80 | v->counter c_op a; \ |
81 | } | 81 | raw_spin_unlock_irqrestore(lock, flags); \ |
82 | EXPORT_SYMBOL(atomic64_add); | 82 | } \ |
83 | 83 | EXPORT_SYMBOL(atomic64_##op); | |
84 | long long atomic64_add_return(long long a, atomic64_t *v) | 84 | |
85 | { | 85 | #define ATOMIC64_OP_RETURN(op, c_op) \ |
86 | unsigned long flags; | 86 | long long atomic64_##op##_return(long long a, atomic64_t *v) \ |
87 | raw_spinlock_t *lock = lock_addr(v); | 87 | { \ |
88 | long long val; | 88 | unsigned long flags; \ |
89 | 89 | raw_spinlock_t *lock = lock_addr(v); \ | |
90 | raw_spin_lock_irqsave(lock, flags); | 90 | long long val; \ |
91 | val = v->counter += a; | 91 | \ |
92 | raw_spin_unlock_irqrestore(lock, flags); | 92 | raw_spin_lock_irqsave(lock, flags); \ |
93 | return val; | 93 | val = (v->counter c_op a); \ |
94 | } | 94 | raw_spin_unlock_irqrestore(lock, flags); \ |
95 | EXPORT_SYMBOL(atomic64_add_return); | 95 | return val; \ |
96 | 96 | } \ | |
97 | void atomic64_sub(long long a, atomic64_t *v) | 97 | EXPORT_SYMBOL(atomic64_##op##_return); |
98 | { | 98 | |
99 | unsigned long flags; | 99 | #define ATOMIC64_OPS(op, c_op) \ |
100 | raw_spinlock_t *lock = lock_addr(v); | 100 | ATOMIC64_OP(op, c_op) \ |
101 | 101 | ATOMIC64_OP_RETURN(op, c_op) | |
102 | raw_spin_lock_irqsave(lock, flags); | 102 | |
103 | v->counter -= a; | 103 | ATOMIC64_OPS(add, +=) |
104 | raw_spin_unlock_irqrestore(lock, flags); | 104 | ATOMIC64_OPS(sub, -=) |
105 | } | 105 | |
106 | EXPORT_SYMBOL(atomic64_sub); | 106 | #undef ATOMIC64_OPS |
107 | 107 | #undef ATOMIC64_OP_RETURN | |
108 | long long atomic64_sub_return(long long a, atomic64_t *v) | 108 | #undef ATOMIC64_OP |
109 | { | ||
110 | unsigned long flags; | ||
111 | raw_spinlock_t *lock = lock_addr(v); | ||
112 | long long val; | ||
113 | |||
114 | raw_spin_lock_irqsave(lock, flags); | ||
115 | val = v->counter -= a; | ||
116 | raw_spin_unlock_irqrestore(lock, flags); | ||
117 | return val; | ||
118 | } | ||
119 | EXPORT_SYMBOL(atomic64_sub_return); | ||
120 | 109 | ||
121 | long long atomic64_dec_if_positive(atomic64_t *v) | 110 | long long atomic64_dec_if_positive(atomic64_t *v) |
122 | { | 111 | { |