about summary refs log tree commit diff stats
path: root/arch/alpha/include
diff options
context:
space:
mode:
authorPeter Zijlstra <peterz@infradead.org>2014-04-23 14:07:47 -0400
committerThomas Gleixner <tglx@linutronix.de>2015-07-27 08:06:21 -0400
commit212d3be102d73dce70cc12f39dce4e0aed2c025b (patch)
tree312fee245c34c7f4310c029a268e3d0d7641c2ed /arch/alpha/include
parent56d1defe0bbddaa97d6e74b51490904130fd4f1d (diff)
alpha: Provide atomic_{or,xor,and}
Implement atomic logic ops -- atomic_{or,xor,and}. These will replace the atomic_{set,clear}_mask functions that are available on some archs. Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'arch/alpha/include')
-rw-r--r--arch/alpha/include/asm/atomic.h43
1 file changed, 28 insertions, 15 deletions
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 8f8eafbedd7c..0eff853398d2 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -29,13 +29,13 @@
29 * branch back to restart the operation. 29 * branch back to restart the operation.
30 */ 30 */
31 31
32#define ATOMIC_OP(op) \ 32#define ATOMIC_OP(op, asm_op) \
33static __inline__ void atomic_##op(int i, atomic_t * v) \ 33static __inline__ void atomic_##op(int i, atomic_t * v) \
34{ \ 34{ \
35 unsigned long temp; \ 35 unsigned long temp; \
36 __asm__ __volatile__( \ 36 __asm__ __volatile__( \
37 "1: ldl_l %0,%1\n" \ 37 "1: ldl_l %0,%1\n" \
38 " " #op "l %0,%2,%0\n" \ 38 " " #asm_op " %0,%2,%0\n" \
39 " stl_c %0,%1\n" \ 39 " stl_c %0,%1\n" \
40 " beq %0,2f\n" \ 40 " beq %0,2f\n" \
41 ".subsection 2\n" \ 41 ".subsection 2\n" \
@@ -45,15 +45,15 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
45 :"Ir" (i), "m" (v->counter)); \ 45 :"Ir" (i), "m" (v->counter)); \
46} \ 46} \
47 47
48#define ATOMIC_OP_RETURN(op) \ 48#define ATOMIC_OP_RETURN(op, asm_op) \
49static inline int atomic_##op##_return(int i, atomic_t *v) \ 49static inline int atomic_##op##_return(int i, atomic_t *v) \
50{ \ 50{ \
51 long temp, result; \ 51 long temp, result; \
52 smp_mb(); \ 52 smp_mb(); \
53 __asm__ __volatile__( \ 53 __asm__ __volatile__( \
54 "1: ldl_l %0,%1\n" \ 54 "1: ldl_l %0,%1\n" \
55 " " #op "l %0,%3,%2\n" \ 55 " " #asm_op " %0,%3,%2\n" \
56 " " #op "l %0,%3,%0\n" \ 56 " " #asm_op " %0,%3,%0\n" \
57 " stl_c %0,%1\n" \ 57 " stl_c %0,%1\n" \
58 " beq %0,2f\n" \ 58 " beq %0,2f\n" \
59 ".subsection 2\n" \ 59 ".subsection 2\n" \
@@ -65,13 +65,13 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
65 return result; \ 65 return result; \
66} 66}
67 67
68#define ATOMIC64_OP(op) \ 68#define ATOMIC64_OP(op, asm_op) \
69static __inline__ void atomic64_##op(long i, atomic64_t * v) \ 69static __inline__ void atomic64_##op(long i, atomic64_t * v) \
70{ \ 70{ \
71 unsigned long temp; \ 71 unsigned long temp; \
72 __asm__ __volatile__( \ 72 __asm__ __volatile__( \
73 "1: ldq_l %0,%1\n" \ 73 "1: ldq_l %0,%1\n" \
74 " " #op "q %0,%2,%0\n" \ 74 " " #asm_op " %0,%2,%0\n" \
75 " stq_c %0,%1\n" \ 75 " stq_c %0,%1\n" \
76 " beq %0,2f\n" \ 76 " beq %0,2f\n" \
77 ".subsection 2\n" \ 77 ".subsection 2\n" \
@@ -81,15 +81,15 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \
81 :"Ir" (i), "m" (v->counter)); \ 81 :"Ir" (i), "m" (v->counter)); \
82} \ 82} \
83 83
84#define ATOMIC64_OP_RETURN(op) \ 84#define ATOMIC64_OP_RETURN(op, asm_op) \
85static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \ 85static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
86{ \ 86{ \
87 long temp, result; \ 87 long temp, result; \
88 smp_mb(); \ 88 smp_mb(); \
89 __asm__ __volatile__( \ 89 __asm__ __volatile__( \
90 "1: ldq_l %0,%1\n" \ 90 "1: ldq_l %0,%1\n" \
91 " " #op "q %0,%3,%2\n" \ 91 " " #asm_op " %0,%3,%2\n" \
92 " " #op "q %0,%3,%0\n" \ 92 " " #asm_op " %0,%3,%0\n" \
93 " stq_c %0,%1\n" \ 93 " stq_c %0,%1\n" \
94 " beq %0,2f\n" \ 94 " beq %0,2f\n" \
95 ".subsection 2\n" \ 95 ".subsection 2\n" \
@@ -101,15 +101,28 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
101 return result; \ 101 return result; \
102} 102}
103 103
104#define ATOMIC_OPS(opg) \ 104#define ATOMIC_OPS(op) \
105 ATOMIC_OP(opg) \ 105 ATOMIC_OP(op, op##l) \
106 ATOMIC_OP_RETURN(opg) \ 106 ATOMIC_OP_RETURN(op, op##l) \
107 ATOMIC64_OP(opg) \ 107 ATOMIC64_OP(op, op##q) \
108 ATOMIC64_OP_RETURN(opg) 108 ATOMIC64_OP_RETURN(op, op##q)
109 109
110ATOMIC_OPS(add) 110ATOMIC_OPS(add)
111ATOMIC_OPS(sub) 111ATOMIC_OPS(sub)
112 112
113#define CONFIG_ARCH_HAS_ATOMIC_OR
114#define atomic_andnot atomic_andnot
115#define atomic64_andnot atomic64_andnot
116
117ATOMIC_OP(and, and)
118ATOMIC_OP(andnot, bic)
119ATOMIC_OP(or, bis)
120ATOMIC_OP(xor, xor)
121ATOMIC64_OP(and, and)
122ATOMIC64_OP(andnot, bic)
123ATOMIC64_OP(or, bis)
124ATOMIC64_OP(xor, xor)
125
113#undef ATOMIC_OPS 126#undef ATOMIC_OPS
114#undef ATOMIC64_OP_RETURN 127#undef ATOMIC64_OP_RETURN
115#undef ATOMIC64_OP 128#undef ATOMIC64_OP