diff options
author | Peter Zijlstra <peterz@infradead.org> | 2014-04-23 13:40:25 -0400 |
---|---|---|
committer | Thomas Gleixner <tglx@linutronix.de> | 2015-07-27 08:06:23 -0400 |
commit | 304a0d699a3c6103b61d5ea18d56820e7d8e3116 (patch) | |
tree | dd0166235d2a1dc0c7a152e6faa825d2794cc3a0 | |
parent | 658aa51459c2f5284183d35b6dd0beca0e0bfe2f (diff) |
sparc: Provide atomic_{or,xor,and}
Implement atomic logic ops -- atomic_{or,xor,and}.
These will replace the atomic_{set,clear}_mask functions that are
available on some archs.
Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
-rw-r--r-- | arch/sparc/include/asm/atomic_32.h | 4 | ||||
-rw-r--r-- | arch/sparc/include/asm/atomic_64.h | 6 | ||||
-rw-r--r-- | arch/sparc/lib/atomic32.c | 22 | ||||
-rw-r--r-- | arch/sparc/lib/atomic_64.S | 6 | ||||
-rw-r--r-- | arch/sparc/lib/ksyms.c | 3 |
5 files changed, 38 insertions(+), 3 deletions(-)
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h index 0e69b7e7a439..e19d8880b146 100644 --- a/arch/sparc/include/asm/atomic_32.h +++ b/arch/sparc/include/asm/atomic_32.h | |||
@@ -17,10 +17,14 @@ | |||
17 | #include <asm/barrier.h> | 17 | #include <asm/barrier.h> |
18 | #include <asm-generic/atomic64.h> | 18 | #include <asm-generic/atomic64.h> |
19 | 19 | ||
20 | #define CONFIG_ARCH_HAS_ATOMIC_OR | ||
20 | 21 | ||
21 | #define ATOMIC_INIT(i) { (i) } | 22 | #define ATOMIC_INIT(i) { (i) } |
22 | 23 | ||
23 | int atomic_add_return(int, atomic_t *); | 24 | int atomic_add_return(int, atomic_t *); |
25 | void atomic_and(int, atomic_t *); | ||
26 | void atomic_or(int, atomic_t *); | ||
27 | void atomic_xor(int, atomic_t *); | ||
24 | int atomic_cmpxchg(atomic_t *, int, int); | 28 | int atomic_cmpxchg(atomic_t *, int, int); |
25 | int atomic_xchg(atomic_t *, int); | 29 | int atomic_xchg(atomic_t *, int); |
26 | int __atomic_add_unless(atomic_t *, int, int); | 30 | int __atomic_add_unless(atomic_t *, int, int); |
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h index 4082749913ce..d6af27c93450 100644 --- a/arch/sparc/include/asm/atomic_64.h +++ b/arch/sparc/include/asm/atomic_64.h | |||
@@ -33,6 +33,12 @@ long atomic64_##op##_return(long, atomic64_t *); | |||
33 | ATOMIC_OPS(add) | 33 | ATOMIC_OPS(add) |
34 | ATOMIC_OPS(sub) | 34 | ATOMIC_OPS(sub) |
35 | 35 | ||
36 | #define CONFIG_ARCH_HAS_ATOMIC_OR | ||
37 | |||
38 | ATOMIC_OP(and) | ||
39 | ATOMIC_OP(or) | ||
40 | ATOMIC_OP(xor) | ||
41 | |||
36 | #undef ATOMIC_OPS | 42 | #undef ATOMIC_OPS |
37 | #undef ATOMIC_OP_RETURN | 43 | #undef ATOMIC_OP_RETURN |
38 | #undef ATOMIC_OP | 44 | #undef ATOMIC_OP |
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c index 71cd65ab200c..b9d63c0a7aab 100644 --- a/arch/sparc/lib/atomic32.c +++ b/arch/sparc/lib/atomic32.c | |||
@@ -27,22 +27,38 @@ static DEFINE_SPINLOCK(dummy); | |||
27 | 27 | ||
28 | #endif /* SMP */ | 28 | #endif /* SMP */ |
29 | 29 | ||
30 | #define ATOMIC_OP(op, cop) \ | 30 | #define ATOMIC_OP_RETURN(op, c_op) \ |
31 | int atomic_##op##_return(int i, atomic_t *v) \ | 31 | int atomic_##op##_return(int i, atomic_t *v) \ |
32 | { \ | 32 | { \ |
33 | int ret; \ | 33 | int ret; \ |
34 | unsigned long flags; \ | 34 | unsigned long flags; \ |
35 | spin_lock_irqsave(ATOMIC_HASH(v), flags); \ | 35 | spin_lock_irqsave(ATOMIC_HASH(v), flags); \ |
36 | \ | 36 | \ |
37 | ret = (v->counter cop i); \ | 37 | ret = (v->counter c_op i); \ |
38 | \ | 38 | \ |
39 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); \ | 39 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); \ |
40 | return ret; \ | 40 | return ret; \ |
41 | } \ | 41 | } \ |
42 | EXPORT_SYMBOL(atomic_##op##_return); | 42 | EXPORT_SYMBOL(atomic_##op##_return); |
43 | 43 | ||
44 | ATOMIC_OP(add, +=) | 44 | #define ATOMIC_OP(op, c_op) \ |
45 | void atomic_##op(int i, atomic_t *v) \ | ||
46 | { \ | ||
47 | unsigned long flags; \ | ||
48 | spin_lock_irqsave(ATOMIC_HASH(v), flags); \ | ||
49 | \ | ||
50 | v->counter c_op i; \ | ||
51 | \ | ||
52 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); \ | ||
53 | } \ | ||
54 | EXPORT_SYMBOL(atomic_##op); | ||
55 | |||
56 | ATOMIC_OP_RETURN(add, +=) | ||
57 | ATOMIC_OP(and, &=) | ||
58 | ATOMIC_OP(or, |=) | ||
59 | ATOMIC_OP(xor, ^=) | ||
45 | 60 | ||
61 | #undef ATOMIC_OP_RETURN | ||
46 | #undef ATOMIC_OP | 62 | #undef ATOMIC_OP |
47 | 63 | ||
48 | int atomic_xchg(atomic_t *v, int new) | 64 | int atomic_xchg(atomic_t *v, int new) |
diff --git a/arch/sparc/lib/atomic_64.S b/arch/sparc/lib/atomic_64.S index 05dac43907d1..d6b0363f345b 100644 --- a/arch/sparc/lib/atomic_64.S +++ b/arch/sparc/lib/atomic_64.S | |||
@@ -47,6 +47,9 @@ ENDPROC(atomic_##op##_return); | |||
47 | 47 | ||
48 | ATOMIC_OPS(add) | 48 | ATOMIC_OPS(add) |
49 | ATOMIC_OPS(sub) | 49 | ATOMIC_OPS(sub) |
50 | ATOMIC_OP(and) | ||
51 | ATOMIC_OP(or) | ||
52 | ATOMIC_OP(xor) | ||
50 | 53 | ||
51 | #undef ATOMIC_OPS | 54 | #undef ATOMIC_OPS |
52 | #undef ATOMIC_OP_RETURN | 55 | #undef ATOMIC_OP_RETURN |
@@ -84,6 +87,9 @@ ENDPROC(atomic64_##op##_return); | |||
84 | 87 | ||
85 | ATOMIC64_OPS(add) | 88 | ATOMIC64_OPS(add) |
86 | ATOMIC64_OPS(sub) | 89 | ATOMIC64_OPS(sub) |
90 | ATOMIC64_OP(and) | ||
91 | ATOMIC64_OP(or) | ||
92 | ATOMIC64_OP(xor) | ||
87 | 93 | ||
88 | #undef ATOMIC64_OPS | 94 | #undef ATOMIC64_OPS |
89 | #undef ATOMIC64_OP_RETURN | 95 | #undef ATOMIC64_OP_RETURN |
diff --git a/arch/sparc/lib/ksyms.c b/arch/sparc/lib/ksyms.c index 1d649a95660c..bb6005997268 100644 --- a/arch/sparc/lib/ksyms.c +++ b/arch/sparc/lib/ksyms.c | |||
@@ -111,6 +111,9 @@ EXPORT_SYMBOL(atomic64_##op##_return); | |||
111 | 111 | ||
112 | ATOMIC_OPS(add) | 112 | ATOMIC_OPS(add) |
113 | ATOMIC_OPS(sub) | 113 | ATOMIC_OPS(sub) |
114 | ATOMIC_OP(and) | ||
115 | ATOMIC_OP(or) | ||
116 | ATOMIC_OP(xor) | ||
114 | 117 | ||
115 | #undef ATOMIC_OPS | 118 | #undef ATOMIC_OPS |
116 | #undef ATOMIC_OP_RETURN | 119 | #undef ATOMIC_OP_RETURN |