Diffstat (limited to 'include/asm-arm/atomic.h')
-rw-r--r--	include/asm-arm/atomic.h	44
1 files changed, 44 insertions, 0 deletions
diff --git a/include/asm-arm/atomic.h b/include/asm-arm/atomic.h
index 2885972b0855..d586f65c8228 100644
--- a/include/asm-arm/atomic.h
+++ b/include/asm-arm/atomic.h
@@ -12,6 +12,7 @@
 #define __ASM_ARM_ATOMIC_H
 
 #include <linux/config.h>
+#include <linux/compiler.h>
 
 typedef struct { volatile int counter; } atomic_t;
 
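Note: <linux/compiler.h> is pulled in here because the pre-ARMv6 atomic_cmpxchg() added further down uses likely(). In the kernel that macro wraps GCC's __builtin_expect(); a minimal standalone sketch of the same branch-prediction hint (my_likely is a hypothetical name, not from this patch):

#include <stdio.h>

/* roughly what the kernel's likely() does: hint that x is usually true */
#define my_likely(x)	__builtin_expect(!!(x), 1)

int main(void)
{
	int ret = 0;

	/* the hint only influences code layout, never the result */
	if (my_likely(ret == 0))
		printf("fast path\n");
	return 0;
}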
@@ -80,6 +81,24 @@ static inline int atomic_sub_return(int i, atomic_t *v)
 	return result;
 }
 
+static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
+{
+	unsigned long oldval, res;
+
+	do {
+		__asm__ __volatile__("@ atomic_cmpxchg\n"
+		"ldrex	%1, [%2]\n"
+		"mov	%0, #0\n"
+		"teq	%1, %3\n"
+		"strexeq %0, %4, [%2]\n"
+		    : "=&r" (res), "=&r" (oldval)
+		    : "r" (&ptr->counter), "Ir" (old), "r" (new)
+		    : "cc");
+	} while (res);
+
+	return oldval;
+}
+
 static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 {
 	unsigned long tmp, tmp2;
@@ -131,6 +150,20 @@ static inline int atomic_sub_return(int i, atomic_t *v)
 	return val;
 }
 
+static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+	int ret;
+	unsigned long flags;
+
+	local_irq_save(flags);
+	ret = v->counter;
+	if (likely(ret == old))
+		v->counter = new;
+	local_irq_restore(flags);
+
+	return ret;
+}
+
 static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 {
 	unsigned long flags;
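Pre-ARMv6 cores have no ldrex/strex, so this fallback makes the read-compare-store sequence uninterruptible with local_irq_save()/local_irq_restore(), which is sufficient because those cores are uniprocessor. A rough userspace analogue, serialising the same three steps with a lock (emulated_cmpxchg is hypothetical, not from the patch):

#include <pthread.h>
#include <stdio.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

static int emulated_cmpxchg(int *v, int old, int new)
{
	int ret;

	pthread_mutex_lock(&lock);	/* stands in for local_irq_save() */
	ret = *v;
	if (ret == old)
		*v = new;
	pthread_mutex_unlock(&lock);	/* stands in for local_irq_restore() */

	return ret;
}

int main(void)
{
	int v = 3;

	printf("%d\n", emulated_cmpxchg(&v, 3, 4));	/* 3, v becomes 4 */
	printf("%d\n", emulated_cmpxchg(&v, 3, 5));	/* 4, v stays 4 */
	return 0;
}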
@@ -142,6 +175,17 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 
 #endif /* __LINUX_ARM_ARCH__ */
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+
+	c = atomic_read(v);
+	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
+		c = old;
+	return c != u;
+}
+#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
+
 #define atomic_add(i, v)	(void) atomic_add_return(i, v)
 #define atomic_inc(v)		(void) atomic_add_return(1, v)
 #define atomic_sub(i, v)	(void) atomic_sub_return(i, v)
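atomic_add_unless() layers a CAS retry loop on top of atomic_cmpxchg(): it keeps re-trying the add until the swap lands on an unchanged counter value, but gives up as soon as the counter equals u; atomic_inc_not_zero() is the classic refcount use of it. A portable sketch of the same loop using GCC's __sync_val_compare_and_swap() builtin (add_unless and inc_not_zero here are local illustrative names, not the kernel API):

#include <stdio.h>

static int add_unless(int *v, int a, int u)
{
	int c, old;

	c = *v;
	/* retry the CAS until it succeeds or the value reaches 'u' */
	while (c != u && (old = __sync_val_compare_and_swap(v, c, c + a)) != c)
		c = old;
	return c != u;		/* 1 if the add was performed */
}

#define inc_not_zero(v)	add_unless((v), 1, 0)

int main(void)
{
	int refcount = 1;

	printf("%d\n", inc_not_zero(&refcount));	/* 1, refcount = 2 */
	refcount = 0;					/* object being torn down */
	printf("%d\n", inc_not_zero(&refcount));	/* 0, reference refused */
	return 0;
}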