path: root/arch/blackfin/include/asm/atomic.h
Diffstat (limited to 'arch/blackfin/include/asm/atomic.h')
-rw-r--r--  arch/blackfin/include/asm/atomic.h  109
1 file changed, 7 insertions(+), 102 deletions(-)
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index b1d92f13ef96..88f36d599fe8 100644
--- a/arch/blackfin/include/asm/atomic.h
+++ b/arch/blackfin/include/asm/atomic.h
@@ -1,24 +1,21 @@
 #ifndef __ARCH_BLACKFIN_ATOMIC__
 #define __ARCH_BLACKFIN_ATOMIC__
 
+#ifndef CONFIG_SMP
+# include <asm-generic/atomic.h>
+#else
+
 #include <linux/types.h>
 #include <asm/system.h> /* local_irq_XXX() */
 
 /*
  * Atomic operations that C can't guarantee us.  Useful for
  * resource counting etc..
- *
- * Generally we do not concern about SMP BFIN systems, so we don't have
- * to deal with that.
- *
- * Tony Kou (tonyko@lineo.ca)   Lineo Inc.   2001
  */
 
 #define ATOMIC_INIT(i) { (i) }
 #define atomic_set(v, i) (((v)->counter) = i)
 
-#ifdef CONFIG_SMP
-
 #define atomic_read(v) __raw_uncached_fetch_asm(&(v)->counter)
 
 asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
@@ -84,100 +81,6 @@ static inline int atomic_test_mask(int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
-#else /* !CONFIG_SMP */
-
-#define atomic_read(v) ((v)->counter)
-
-static inline void atomic_add(int i, atomic_t *v)
-{
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter += i;
-	local_irq_restore_hw(flags);
-}
-
-static inline void atomic_sub(int i, atomic_t *v)
-{
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter -= i;
-	local_irq_restore_hw(flags);
-
-}
-
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-	int __temp = 0;
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter += i;
-	__temp = v->counter;
-	local_irq_restore_hw(flags);
-
-
-	return __temp;
-}
-
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-	int __temp = 0;
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter -= i;
-	__temp = v->counter;
-	local_irq_restore_hw(flags);
-
-	return __temp;
-}
-
-static inline void atomic_inc(volatile atomic_t *v)
-{
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter++;
-	local_irq_restore_hw(flags);
-}
-
-static inline void atomic_dec(volatile atomic_t *v)
-{
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter--;
-	local_irq_restore_hw(flags);
-}
-
-static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
-{
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter &= ~mask;
-	local_irq_restore_hw(flags);
-}
-
-static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
-{
-	unsigned long flags;
-
-	local_irq_save_hw(flags);
-	v->counter |= mask;
-	local_irq_restore_hw(flags);
-}
-
-/* Atomic operations are already serializing */
-#define smp_mb__before_atomic_dec() barrier()
-#define smp_mb__after_atomic_dec() barrier()
-#define smp_mb__before_atomic_inc() barrier()
-#define smp_mb__after_atomic_inc() barrier()
-
-#endif /* !CONFIG_SMP */
-
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic_inc_return(v) atomic_add_return(1,(v))
@@ -210,4 +113,6 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 
 #include <asm-generic/atomic-long.h>
 
-#endif /* __ARCH_BLACKFIN_ATOMIC __ */
+#endif
+
+#endif
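
For context on the !CONFIG_SMP branch this commit deletes: each operation masked hardware interrupts around a plain read-modify-write of v->counter, which is enough on a uniprocessor because the only concurrency comes from interrupt handlers. The sketch below restates that pattern in a self-contained form; my_atomic_t, irq_save() and irq_restore() are hypothetical stand-ins for the kernel's atomic_t and local_irq_save_hw()/local_irq_restore_hw(), not the actual kernel definitions.

/*
 * Sketch of the UP atomic pattern removed above: disable interrupts,
 * do a plain read-modify-write, restore.  Helpers are placeholders.
 */
typedef struct { volatile int counter; } my_atomic_t;

static unsigned long irq_save(void)          { return 0; }   /* stand-in: mask irqs, return old state */
static void irq_restore(unsigned long flags) { (void)flags; } /* stand-in: restore saved state */

static inline int my_atomic_add_return(int i, my_atomic_t *v)
{
	unsigned long flags = irq_save();   /* on UP, nothing else can touch v now */
	v->counter += i;                    /* plain RMW is safe while irqs are off */
	int ret = v->counter;
	irq_restore(flags);
	return ret;
}

static inline void my_atomic_set_mask(unsigned int mask, my_atomic_t *v)
{
	unsigned long flags = irq_save();
	v->counter |= mask;
	irq_restore(flags);
}

The generic header pulled in by the new "#ifndef CONFIG_SMP / # include <asm-generic/atomic.h>" lines provides equivalent irq-disabling definitions for uniprocessor builds, which is why the Blackfin-specific copy could be dropped; the SMP side keeps its __raw_*_asm helpers unchanged.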