author     Peter Zijlstra <peterz@infradead.org>    2014-04-23 13:57:49 -0400
committer  Thomas Gleixner <tglx@linutronix.de>     2015-07-27 08:06:22 -0400
commit     c66e45edef51e1f54297ddaf202fc2dd00852734 (patch)
tree       c60759cb030b6642ed242e97bbf4f3a28362c3f9 /arch/m32r/include
parent     70ed47390d10a7351e44c3a07c5de376780cb6c1 (diff)
m32r: Provide atomic_{or,xor,and}
Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
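For illustration, a hypothetical caller conversion that these ops enable (IRQ_PENDING, pending, mark_pending() and clear_pending() are made-up names, not part of this patch); since the patch implements atomic_set_mask() as atomic_or(mask, v) and atomic_clear_mask() as atomic_and(~mask, v), callers map over directly:

#include <linux/atomic.h>

#define IRQ_PENDING	0x01		/* made-up flag bit */

static atomic_t pending = ATOMIC_INIT(0);

static void mark_pending(void)
{
	/* was: atomic_set_mask(IRQ_PENDING, &pending); */
	atomic_or(IRQ_PENDING, &pending);
}

static void clear_pending(void)
{
	/* was: atomic_clear_mask(IRQ_PENDING, &pending); */
	atomic_and(~IRQ_PENDING, &pending);
}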
Diffstat (limited to 'arch/m32r/include')
-rw-r--r--   arch/m32r/include/asm/atomic.h   44
1 file changed, 10 insertions, 34 deletions
diff --git a/arch/m32r/include/asm/atomic.h b/arch/m32r/include/asm/atomic.h
index 31bb74adba08..7245463c1e98 100644
--- a/arch/m32r/include/asm/atomic.h
+++ b/arch/m32r/include/asm/atomic.h
@@ -94,6 +94,12 @@ static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
 
+#define CONFIG_ARCH_HAS_ATOMIC_OR
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
@@ -240,44 +246,14 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 }
 
 
-static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
+static __inline__ __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
-	unsigned long flags;
-	unsigned long tmp;
-
-	local_irq_save(flags);
-	__asm__ __volatile__ (
-		"# atomic_clear_mask		\n\t"
-		DCACHE_CLEAR("%0", "r5", "%1")
-		M32R_LOCK" %0, @%1;		\n\t"
-		"and	%0, %2;			\n\t"
-		M32R_UNLOCK" %0, @%1;		\n\t"
-		: "=&r" (tmp)
-		: "r" (addr), "r" (~mask)
-		: "memory"
-		__ATOMIC_CLOBBER
-	);
-	local_irq_restore(flags);
+	atomic_and(~mask, v);
 }
 
-static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
+static __inline__ __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
 {
-	unsigned long flags;
-	unsigned long tmp;
-
-	local_irq_save(flags);
-	__asm__ __volatile__ (
-		"# atomic_set_mask		\n\t"
-		DCACHE_CLEAR("%0", "r5", "%1")
-		M32R_LOCK" %0, @%1;		\n\t"
-		"or	%0, %2;			\n\t"
-		M32R_UNLOCK" %0, @%1;		\n\t"
-		: "=&r" (tmp)
-		: "r" (addr), "r" (mask)
-		: "memory"
-		__ATOMIC_CLOBBER
-	);
-	local_irq_restore(flags);
+	atomic_or(mask, v);
 }
 
 #endif	/* _ASM_M32R_ATOMIC_H */
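For reference, the atomic_or() generated by ATOMIC_OP(or) in the first hunk ends up doing essentially what the deleted atomic_set_mask() open-coded. A rough sketch of that expansion, inferred from the removed body above rather than from the actual ATOMIC_OP() macro (which is defined earlier in this file and may differ in detail):

/* Sketch only: inferred from the removed atomic_set_mask() body above,
 * not copied from the real ATOMIC_OP() macro. */
static __inline__ void atomic_or(int i, atomic_t *v)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_or			\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"or	%0, %2;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (&v->counter), "r" (i)	/* same pattern, operating on v->counter */
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);
}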