Diffstat (limited to 'arch')
-rw-r--r--  arch/blackfin/include/asm/atomic.h | 104
1 file changed, 10 insertions(+), 94 deletions(-)
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index 135225696fd2..54c6e2887e9f 100644
--- a/arch/blackfin/include/asm/atomic.h
+++ b/arch/blackfin/include/asm/atomic.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2004-2009 Analog Devices Inc.
+ * Copyright 2004-2011 Analog Devices Inc.
  *
  * Licensed under the GPL-2 or later.
  */
@@ -7,111 +7,27 @@
 #ifndef __ARCH_BLACKFIN_ATOMIC__
 #define __ARCH_BLACKFIN_ATOMIC__
 
-#ifndef CONFIG_SMP
-# include <asm-generic/atomic.h>
-#else
+#ifdef CONFIG_SMP
 
-#include <linux/types.h>
-#include <asm/system.h>	/* local_irq_XXX() */
-
-/*
- * Atomic operations that C can't guarantee us.  Useful for
- * resource counting etc..
- */
-
-#define ATOMIC_INIT(i)	{ (i) }
-#define atomic_set(v, i)	(((v)->counter) = i)
-
-#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)
+#include <linux/linkage.h>
 
 asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
-
 asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);
-
 asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);
-
 asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);
-
 asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
-
 asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
 
-static inline void atomic_add(int i, atomic_t *v)
-{
-	__raw_atomic_update_asm(&v->counter, i);
-}
-
-static inline void atomic_sub(int i, atomic_t *v)
-{
-	__raw_atomic_update_asm(&v->counter, -i);
-}
-
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-	return __raw_atomic_update_asm(&v->counter, i);
-}
-
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-	return __raw_atomic_update_asm(&v->counter, -i);
-}
+#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)
 
-static inline void atomic_inc(volatile atomic_t *v)
-{
-	__raw_atomic_update_asm(&v->counter, 1);
-}
-
-static inline void atomic_dec(volatile atomic_t *v)
-{
-	__raw_atomic_update_asm(&v->counter, -1);
-}
-
-static inline void atomic_clear_mask(int mask, atomic_t *v)
-{
-	__raw_atomic_clear_asm(&v->counter, mask);
-}
-
-static inline void atomic_set_mask(int mask, atomic_t *v)
-{
-	__raw_atomic_set_asm(&v->counter, mask);
-}
-
-/* Atomic operations are already serializing */
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
-
-#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
-#define atomic_dec_return(v)	atomic_sub_return(1, (v))
-#define atomic_inc_return(v)	atomic_add_return(1, (v))
-
-#define atomic_cmpxchg(v, o, n)	((int)cmpxchg(&((v)->counter), (o), (n)))
-#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
-
-#define __atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c;							\
-})
-
-/*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
-#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
+#define atomic_add_return(i, v) __raw_atomic_update_asm(&(v)->counter, i)
+#define atomic_sub_return(i, v) __raw_atomic_update_asm(&(v)->counter, -(i))
 
+#define atomic_clear_mask(m, v) __raw_atomic_clear_asm(&(v)->counter, m)
+#define atomic_set_mask(m, v)   __raw_atomic_set_asm(&(v)->counter, m)
 
 #endif
 
+#include <asm-generic/atomic.h>
+
 #endif
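
Note: the wrappers deleted above are not lost. Once atomic_add_return() and atomic_sub_return() exist, asm-generic/atomic.h (now included unconditionally at the bottom of the header) derives the remaining operations from them. A minimal sketch of that composition, assuming the generic fallback shapes of this kernel era; the bodies below are illustrative, not the generic header's verbatim code:

/*
 * Illustrative only -- approximates what asm-generic/atomic.h provides
 * once the arch supplies atomic_add_return()/atomic_sub_return().
 * atomic_t comes from the usual kernel headers.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	(void)atomic_add_return(i, v);	/* same update, result discarded */
}

static inline void atomic_inc(atomic_t *v)
{
	(void)atomic_add_return(1, v);
}

#define atomic_dec_return(v)	atomic_sub_return(1, (v))
#define atomic_inc_and_test(v)	(atomic_add_return(1, (v)) == 0)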