author	Peter Zijlstra <peterz@infradead.org>	2015-04-23 15:44:42 -0400
committer	Thomas Gleixner <tglx@linutronix.de>	2015-07-27 08:06:22 -0400
commit	d835b6c4cc02507b3bf3f8ee6c86857cf0ee67ab (patch)
tree	4bee7d7516eb8f382d137cde96b46fe2b6eea442 /arch/blackfin/include/asm
parent	f8a570e270bf62363cd498ac2ac8ea07a76ad4d6 (diff)
blackfin: Provide atomic_{or,xor,and}
Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.

TODO: use inline asm or at least asm macros to collapse the lot.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
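As a side note on the TODO above: one possible shape for collapsing the repeated wrappers on the C side -- purely an illustrative sketch, not part of this patch -- is to generate the three ops from a single template macro, assuming the __raw_atomic_{and,or,xor}_asm prototypes declared in the diff below:

/* Illustrative sketch only -- not part of this commit. */
#define ATOMIC_OP(op)						\
static inline void atomic_##op(int i, atomic_t *v)		\
{								\
	/* fire-and-forget: discard the returned new value */	\
	(void)__raw_atomic_##op##_asm(&v->counter, i);		\
}

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OP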
Diffstat (limited to 'arch/blackfin/include/asm')
-rw-r--r--	arch/blackfin/include/asm/atomic.h | 28 +++++++++++++++++++++++-------
1 file changed, 21 insertions(+), 7 deletions(-)
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index a107a98e9978..eafa55b81a7b 100644
--- a/arch/blackfin/include/asm/atomic.h
+++ b/arch/blackfin/include/asm/atomic.h
@@ -16,19 +16,33 @@
 #include <linux/types.h>

 asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
-asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);
-asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);
-asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);
+asmlinkage int __raw_atomic_add_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_and_asm(volatile int *ptr, int value);
+asmlinkage int __raw_atomic_or_asm(volatile int *ptr, int value);
 asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
 asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

 #define atomic_read(v) __raw_uncached_fetch_asm(&(v)->counter)

-#define atomic_add_return(i, v) __raw_atomic_update_asm(&(v)->counter, i)
-#define atomic_sub_return(i, v) __raw_atomic_update_asm(&(v)->counter, -(i))
+#define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i)
+#define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i))
+
+#define CONFIG_ARCH_HAS_ATOMIC_OR
+
+#define atomic_or(i, v)  (void)__raw_atomic_or_asm(&(v)->counter, i)
+#define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i)
+#define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)
+
+static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
+{
+	atomic_and(~mask, v);
+}

-#define atomic_clear_mask(m, v) __raw_atomic_clear_asm(&(v)->counter, m)
-#define atomic_set_mask(m, v) __raw_atomic_set_asm(&(v)->counter, m)
+static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
+{
+	atomic_or(mask, v);
+}

 #endif
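As background for the conversion above: atomic_set_mask(m, v) sets the bits of m in v, which is just an OR, and atomic_clear_mask(m, v) clears them, which is an AND with the complement -- exactly what the new __deprecated wrappers express. A minimal self-contained userspace sketch of that identity (plain unsigned ints standing in for atomic_t; no actual atomicity is modeled):

#include <assert.h>
#include <stdio.h>

/* Non-atomic stand-ins that model only the bit semantics. */
static void set_mask(unsigned int mask, unsigned int *v)   { *v |= mask; }
static void clear_mask(unsigned int mask, unsigned int *v) { *v &= ~mask; }

int main(void)
{
	unsigned int v = 0xf0f0f0f0;

	set_mask(0x0000000f, &v);	/* set   == or             */
	assert(v == 0xf0f0f0ff);

	clear_mask(0x000000ff, &v);	/* clear == and with ~mask */
	assert(v == 0xf0f0f000);

	printf("v = 0x%08x\n", v);	/* prints v = 0xf0f0f000 */
	return 0;
}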