author		Graf Yang <graf.yang@analog.com>	2009-01-07 10:14:39 -0500
committer	Bryan Wu <cooloney@kernel.org>	2009-01-07 10:14:39 -0500
commit		6b3087c64a92a36ae20d33479b4df6d7afc910d4 (patch)
tree		95984fc623658ebf150d0d912a7f6c5a0301a5a9 /arch/blackfin/include/asm/atomic.h
parent		c51b4488cd5bff08ed5690a8f303ff7f0894da2a (diff)
Blackfin arch: SMP supporting patchset: Blackfin header files and machine common code
The Blackfin dual-core BF561 processor can support SMP-like features:
https://docs.blackfin.uclinux.org/doku.php?id=linux-kernel:smp-like

This patch extends the Blackfin header files and machine common code
to support SMP.

Signed-off-by: Graf Yang <graf.yang@analog.com>
Signed-off-by: Bryan Wu <cooloney@kernel.org>
Diffstat (limited to 'arch/blackfin/include/asm/atomic.h')
-rw-r--r--	arch/blackfin/include/asm/atomic.h	119
1 file changed, 95 insertions(+), 24 deletions(-)
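On SMP, every operation below funnels into a small set of asmlinkage helpers implemented in assembly elsewhere in the patchset; even atomic_read() goes through __raw_uncached_fetch_asm(), since the BF561's two cores lack hardware cache coherency and a plain load of (v)->counter is not safe. A caller-side sketch of the API this header exports; the `pending` counter and the two functions are hypothetical, invented for illustration:

#include <asm/atomic.h>

static atomic_t pending = ATOMIC_INIT(0);

void producer(void)
{
	/* SMP build: __raw_atomic_update_asm(&pending.counter, 1);
	 * UP build: local_irq_save / increment / local_irq_restore. */
	atomic_inc(&pending);
}

int consumer_done(void)
{
	/* atomic_dec_return() is atomic_sub_return(1, v): the new value
	 * comes back from the same atomic update, so there is no window
	 * between the decrement and the test. */
	return atomic_dec_return(&pending) == 0;
}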
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index 25776c19064b..d76275e5638c 100644
--- a/arch/blackfin/include/asm/atomic.h
+++ b/arch/blackfin/include/asm/atomic.h
@@ -15,11 +15,80 @@
  */
 
 #define ATOMIC_INIT(i)	{ (i) }
-
-#define atomic_read(v)	((v)->counter)
 #define atomic_set(v, i)	(((v)->counter) = i)
 
-static __inline__ void atomic_add(int i, atomic_t * v)
+#ifdef CONFIG_SMP
+
+#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)
+
+asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
+
+asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
+
+static inline void atomic_add(int i, atomic_t *v)
+{
+	__raw_atomic_update_asm(&v->counter, i);
+}
+
+static inline void atomic_sub(int i, atomic_t *v)
+{
+	__raw_atomic_update_asm(&v->counter, -i);
+}
+
+static inline int atomic_add_return(int i, atomic_t *v)
+{
+	return __raw_atomic_update_asm(&v->counter, i);
+}
+
+static inline int atomic_sub_return(int i, atomic_t *v)
+{
+	return __raw_atomic_update_asm(&v->counter, -i);
+}
+
+static inline void atomic_inc(volatile atomic_t *v)
+{
+	__raw_atomic_update_asm(&v->counter, 1);
+}
+
+static inline void atomic_dec(volatile atomic_t *v)
+{
+	__raw_atomic_update_asm(&v->counter, -1);
+}
+
+static inline void atomic_clear_mask(int mask, atomic_t *v)
+{
+	__raw_atomic_clear_asm(&v->counter, mask);
+}
+
+static inline void atomic_set_mask(int mask, atomic_t *v)
+{
+	__raw_atomic_set_asm(&v->counter, mask);
+}
+
+static inline int atomic_test_mask(int mask, atomic_t *v)
+{
+	return __raw_atomic_test_asm(&v->counter, mask);
+}
+
+/* Atomic operations are already serializing */
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
+#else /* !CONFIG_SMP */
+
+#define atomic_read(v)	((v)->counter)
+
+static inline void atomic_add(int i, atomic_t *v)
 {
 	long flags;
 
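The mask helpers above follow the usual Linux semantics: atomic_set_mask() ORs bits in, atomic_clear_mask() ANDs them out. A hypothetical usage sketch, with the flag names and functions invented for illustration:

static atomic_t irq_flags = ATOMIC_INIT(0);

#define FLAG_RX	0x1
#define FLAG_TX	0x2

static void mark_rx_pending(void)
{
	atomic_set_mask(FLAG_RX, &irq_flags);	/* irq_flags |= FLAG_RX */
}

static void ack_rx(void)
{
	atomic_clear_mask(FLAG_RX, &irq_flags);	/* irq_flags &= ~FLAG_RX */
}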
@@ -28,7 +97,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 	local_irq_restore(flags);
 }
 
-static __inline__ void atomic_sub(int i, atomic_t * v)
+static inline void atomic_sub(int i, atomic_t *v)
 {
 	long flags;
 
@@ -38,7 +107,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 
 }
 
-static inline int atomic_add_return(int i, atomic_t * v)
+static inline int atomic_add_return(int i, atomic_t *v)
 {
 	int __temp = 0;
 	long flags;
@@ -52,8 +121,7 @@ static inline int atomic_add_return(int i, atomic_t * v)
 	return __temp;
 }
 
-#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
-static inline int atomic_sub_return(int i, atomic_t * v)
+static inline int atomic_sub_return(int i, atomic_t *v)
 {
 	int __temp = 0;
 	long flags;
@@ -66,7 +134,7 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 	return __temp;
 }
 
-static __inline__ void atomic_inc(volatile atomic_t * v)
+static inline void atomic_inc(volatile atomic_t *v)
 {
 	long flags;
 
@@ -75,20 +143,7 @@ static __inline__ void atomic_inc(volatile atomic_t * v)
 	local_irq_restore(flags);
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
-
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-
-static __inline__ void atomic_dec(volatile atomic_t * v)
+static inline void atomic_dec(volatile atomic_t *v)
 {
 	long flags;
 
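atomic_cmpxchg(), atomic_xchg(), and the atomic_add_unless() loop removed here are not dropped: the final hunk re-adds them after the #endif so the SMP and UP branches share one copy. For readability, here is the atomic_add_unless() macro unrolled into an equivalent function; this is a sketch for explanation, not code from the patch:

/* Add a to v unless v == u; returns non-zero if the add happened. */
static inline int atomic_add_unless_sketch(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	while (c != u) {
		/* cmpxchg stores c + a only if v still holds c, and
		 * always returns the prior value. */
		int old = atomic_cmpxchg(v, c, c + a);
		if (old == c)
			break;		/* our update won */
		c = old;		/* raced with another core; retry */
	}
	return c != u;
}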
@@ -97,7 +152,7 @@ static __inline__ void atomic_dec(volatile atomic_t * v)
 	local_irq_restore(flags);
 }
 
-static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t * v)
+static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
 	long flags;
 
@@ -106,7 +161,7 @@ static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t * v)
 	local_irq_restore(flags);
 }
 
-static __inline__ void atomic_set_mask(unsigned int mask, atomic_t * v)
+static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 {
 	long flags;
 
@@ -121,9 +176,25 @@ static __inline__ void atomic_set_mask(unsigned int mask, atomic_t * v)
 #define smp_mb__before_atomic_inc()	barrier()
 #define smp_mb__after_atomic_inc()	barrier()
 
+#endif /* !CONFIG_SMP */
+
+#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic_inc_return(v) atomic_add_return(1,(v))
 
+#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_add_unless(v, a, u)				\
+({								\
+	int c, old;						\
+	c = atomic_read(v);					\
+	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+		c = old;					\
+	c != (u);						\
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /*
  * atomic_inc_and_test - increment and test
  * @v: pointer of type atomic_t