Diffstat (limited to 'arch/blackfin/include/asm/atomic.h')
-rw-r--r--   arch/blackfin/include/asm/atomic.h   155
1 file changed, 112 insertions(+), 43 deletions(-)
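Note: the patch below splits the header into an SMP branch that routes every operation through assembly helpers (__raw_atomic_update_asm() and friends) and a !CONFIG_SMP branch that keeps the irq-disable approach but switches to local_irq_save_hw()/local_irq_restore_hw(); the generic macros such as atomic_add_unless() move into a common tail shared by both branches. As a minimal, illustrative sketch of the semantics callers rely on, here is a hypothetical reference-count pattern built only from operations defined in this header; struct obj, obj_get() and obj_put() are made-up names, not part of the patch:

/* Hypothetical caller, for illustration only -- not part of this patch. */
#include <asm/atomic.h>

struct obj {
        atomic_t refcnt;        /* 0 means the object is being torn down */
};

static int obj_get(struct obj *o)
{
        /*
         * atomic_inc_not_zero() expands to atomic_add_unless(v, 1, 0):
         * the counter is incremented unless it already holds 0, and the
         * expression is non-zero only when the increment actually happened.
         */
        return atomic_inc_not_zero(&o->refcnt);
}

static int obj_put(struct obj *o)
{
        /*
         * atomic_dec_return() is atomic_sub_return(1, (v)); the caller
         * frees the object when this returns true (count reached zero).
         */
        return atomic_dec_return(&o->refcnt) == 0;
}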
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index 7cf508718605..94b2a9b19451 100644
--- a/arch/blackfin/include/asm/atomic.h
+++ b/arch/blackfin/include/asm/atomic.h
@@ -1,6 +1,7 @@
 #ifndef __ARCH_BLACKFIN_ATOMIC__
 #define __ARCH_BLACKFIN_ATOMIC__
 
+#include <linux/types.h>
 #include <asm/system.h> /* local_irq_XXX() */
 
 /*
@@ -13,108 +14,160 @@
  * Tony Kou (tonyko@lineo.ca) Lineo Inc. 2001
  */
 
-typedef struct {
-        int counter;
-} atomic_t;
 #define ATOMIC_INIT(i) { (i) }
-
-#define atomic_read(v) ((v)->counter)
 #define atomic_set(v, i) (((v)->counter) = i)
 
-static __inline__ void atomic_add(int i, atomic_t * v)
+#ifdef CONFIG_SMP
+
+#define atomic_read(v) __raw_uncached_fetch_asm(&(v)->counter)
+
+asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
+
+asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
+
+asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
+
+static inline void atomic_add(int i, atomic_t *v)
+{
+        __raw_atomic_update_asm(&v->counter, i);
+}
+
+static inline void atomic_sub(int i, atomic_t *v)
+{
+        __raw_atomic_update_asm(&v->counter, -i);
+}
+
+static inline int atomic_add_return(int i, atomic_t *v)
+{
+        return __raw_atomic_update_asm(&v->counter, i);
+}
+
+static inline int atomic_sub_return(int i, atomic_t *v)
+{
+        return __raw_atomic_update_asm(&v->counter, -i);
+}
+
+static inline void atomic_inc(volatile atomic_t *v)
+{
+        __raw_atomic_update_asm(&v->counter, 1);
+}
+
+static inline void atomic_dec(volatile atomic_t *v)
+{
+        __raw_atomic_update_asm(&v->counter, -1);
+}
+
+static inline void atomic_clear_mask(int mask, atomic_t *v)
+{
+        __raw_atomic_clear_asm(&v->counter, mask);
+}
+
+static inline void atomic_set_mask(int mask, atomic_t *v)
+{
+        __raw_atomic_set_asm(&v->counter, mask);
+}
+
+static inline int atomic_test_mask(int mask, atomic_t *v)
+{
+        return __raw_atomic_test_asm(&v->counter, mask);
+}
+
+/* Atomic operations are already serializing */
+#define smp_mb__before_atomic_dec()    barrier()
+#define smp_mb__after_atomic_dec()     barrier()
+#define smp_mb__before_atomic_inc()    barrier()
+#define smp_mb__after_atomic_inc()     barrier()
+
+#else /* !CONFIG_SMP */
+
+#define atomic_read(v) ((v)->counter)
+
+static inline void atomic_add(int i, atomic_t *v)
 {
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter += i;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 }
 
-static __inline__ void atomic_sub(int i, atomic_t * v)
+static inline void atomic_sub(int i, atomic_t *v)
 {
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter -= i;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 
 }
 
-static inline int atomic_add_return(int i, atomic_t * v)
+static inline int atomic_add_return(int i, atomic_t *v)
 {
         int __temp = 0;
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter += i;
         __temp = v->counter;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 
 
         return __temp;
 }
 
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
-static inline int atomic_sub_return(int i, atomic_t * v)
+static inline int atomic_sub_return(int i, atomic_t *v)
 {
         int __temp = 0;
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter -= i;
         __temp = v->counter;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 
         return __temp;
 }
 
-static __inline__ void atomic_inc(volatile atomic_t * v)
+static inline void atomic_inc(volatile atomic_t *v)
 {
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter++;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
-
-#define atomic_add_unless(v, a, u)                              \
-({                                                              \
-        int c, old;                                             \
-        c = atomic_read(v);                                     \
-        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-                c = old;                                        \
-        c != (u);                                               \
-})
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-
-static __inline__ void atomic_dec(volatile atomic_t * v)
+static inline void atomic_dec(volatile atomic_t *v)
 {
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter--;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 }
 
-static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t * v)
+static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter &= ~mask;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 }
 
-static __inline__ void atomic_set_mask(unsigned int mask, atomic_t * v)
+static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 {
         long flags;
 
-        local_irq_save(flags);
+        local_irq_save_hw(flags);
         v->counter |= mask;
-        local_irq_restore(flags);
+        local_irq_restore_hw(flags);
 }
 
 /* Atomic operations are already serializing */
@@ -123,9 +176,25 @@ static __inline__ void atomic_set_mask(unsigned int mask, atomic_t * v)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
+#endif /* !CONFIG_SMP */
+
+#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic_inc_return(v) atomic_add_return(1,(v))
 
+#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_add_unless(v, a, u)                              \
+({                                                              \
+        int c, old;                                             \
+        c = atomic_read(v);                                     \
+        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+                c = old;                                        \
+        c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /*
  * atomic_inc_and_test - increment and test
  * @v: pointer of type atomic_t