about summary refs log tree commit diff stats
path: root/arch/alpha
diff options
context:
space:
mode:
authorPeter Zijlstra <peterz@infradead.org>2016-04-17 19:16:10 -0400
committerIngo Molnar <mingo@kernel.org>2016-06-16 04:48:19 -0400
commit1f51dee7ca7424be6f84067395166f878dbdd8be (patch)
tree00ee8bbb41a295b4e8299bb51d0e5dcf0229d8b3 /arch/alpha
parentb316ff783d17bd6217804e2e4385ce9347d7dad9 (diff)
locking/atomic, arch/alpha: Implement atomic{,64}_fetch_{add,sub,and,andnot,or,xor}()
Implement FETCH-OP atomic primitives, these are very similar to the existing OP-RETURN primitives we already have, except they return the value of the atomic variable _before_ modification. This is especially useful for irreversible operations -- such as bitops (because it becomes impossible to reconstruct the state prior to modification). Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Cc: Andrew Morton <akpm@linux-foundation.org> Cc: Ivan Kokshaysky <ink@jurassic.park.msu.ru> Cc: Linus Torvalds <torvalds@linux-foundation.org> Cc: Matt Turner <mattst88@gmail.com> Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com> Cc: Peter Zijlstra <peterz@infradead.org> Cc: Richard Henderson <rth@twiddle.net> Cc: Thomas Gleixner <tglx@linutronix.de> Cc: linux-alpha@vger.kernel.org Cc: linux-arch@vger.kernel.org Cc: linux-kernel@vger.kernel.org Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'arch/alpha')
-rw-r--r--arch/alpha/include/asm/atomic.h65
1 file changed, 56 insertions, 9 deletions
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 572b228c44c7..8243f17999e3 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -65,6 +65,25 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
65 return result; \ 65 return result; \
66} 66}
67 67
68#define ATOMIC_FETCH_OP(op, asm_op) \
69static inline int atomic_fetch_##op(int i, atomic_t *v) \
70{ \
71 long temp, result; \
72 smp_mb(); \
73 __asm__ __volatile__( \
74 "1: ldl_l %2,%1\n" \
75 " " #asm_op " %2,%3,%0\n" \
76 " stl_c %0,%1\n" \
77 " beq %0,2f\n" \
78 ".subsection 2\n" \
79 "2: br 1b\n" \
80 ".previous" \
81 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
82 :"Ir" (i), "m" (v->counter) : "memory"); \
83 smp_mb(); \
84 return result; \
85}
86
68#define ATOMIC64_OP(op, asm_op) \ 87#define ATOMIC64_OP(op, asm_op) \
69static __inline__ void atomic64_##op(long i, atomic64_t * v) \ 88static __inline__ void atomic64_##op(long i, atomic64_t * v) \
70{ \ 89{ \
@@ -101,11 +120,32 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
101 return result; \ 120 return result; \
102} 121}
103 122
123#define ATOMIC64_FETCH_OP(op, asm_op) \
124static __inline__ long atomic64_fetch_##op(long i, atomic64_t * v) \
125{ \
126 long temp, result; \
127 smp_mb(); \
128 __asm__ __volatile__( \
129 "1: ldq_l %2,%1\n" \
130 " " #asm_op " %2,%3,%0\n" \
131 " stq_c %0,%1\n" \
132 " beq %0,2f\n" \
133 ".subsection 2\n" \
134 "2: br 1b\n" \
135 ".previous" \
136 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
137 :"Ir" (i), "m" (v->counter) : "memory"); \
138 smp_mb(); \
139 return result; \
140}
141
104#define ATOMIC_OPS(op) \ 142#define ATOMIC_OPS(op) \
105 ATOMIC_OP(op, op##l) \ 143 ATOMIC_OP(op, op##l) \
106 ATOMIC_OP_RETURN(op, op##l) \ 144 ATOMIC_OP_RETURN(op, op##l) \
145 ATOMIC_FETCH_OP(op, op##l) \
107 ATOMIC64_OP(op, op##q) \ 146 ATOMIC64_OP(op, op##q) \
108 ATOMIC64_OP_RETURN(op, op##q) 147 ATOMIC64_OP_RETURN(op, op##q) \
148 ATOMIC64_FETCH_OP(op, op##q)
109 149
110ATOMIC_OPS(add) 150ATOMIC_OPS(add)
111ATOMIC_OPS(sub) 151ATOMIC_OPS(sub)
@@ -113,18 +153,25 @@ ATOMIC_OPS(sub)
113#define atomic_andnot atomic_andnot 153#define atomic_andnot atomic_andnot
114#define atomic64_andnot atomic64_andnot 154#define atomic64_andnot atomic64_andnot
115 155
116ATOMIC_OP(and, and) 156#define atomic_fetch_or atomic_fetch_or
117ATOMIC_OP(andnot, bic) 157
118ATOMIC_OP(or, bis) 158#undef ATOMIC_OPS
119ATOMIC_OP(xor, xor) 159#define ATOMIC_OPS(op, asm) \
120ATOMIC64_OP(and, and) 160 ATOMIC_OP(op, asm) \
121ATOMIC64_OP(andnot, bic) 161 ATOMIC_FETCH_OP(op, asm) \
122ATOMIC64_OP(or, bis) 162 ATOMIC64_OP(op, asm) \
123ATOMIC64_OP(xor, xor) 163 ATOMIC64_FETCH_OP(op, asm)
164
165ATOMIC_OPS(and, and)
166ATOMIC_OPS(andnot, bic)
167ATOMIC_OPS(or, bis)
168ATOMIC_OPS(xor, xor)
124 169
125#undef ATOMIC_OPS 170#undef ATOMIC_OPS
171#undef ATOMIC64_FETCH_OP
126#undef ATOMIC64_OP_RETURN 172#undef ATOMIC64_OP_RETURN
127#undef ATOMIC64_OP 173#undef ATOMIC64_OP
174#undef ATOMIC_FETCH_OP
128#undef ATOMIC_OP_RETURN 175#undef ATOMIC_OP_RETURN
129#undef ATOMIC_OP 176#undef ATOMIC_OP
130 177