path: root/arch/frv/include/asm
Diffstat (limited to 'arch/frv/include/asm')
-rw-r--r--  arch/frv/include/asm/atomic.h | 7
-rw-r--r--  arch/frv/include/asm/bitops.h | 6
2 files changed, 1 insertion(+), 12 deletions(-)
diff --git a/arch/frv/include/asm/atomic.h b/arch/frv/include/asm/atomic.h
index b86329d0e316..f6c3a1690101 100644
--- a/arch/frv/include/asm/atomic.h
+++ b/arch/frv/include/asm/atomic.h
@@ -17,6 +17,7 @@
 #include <linux/types.h>
 #include <asm/spr-regs.h>
 #include <asm/cmpxchg.h>
+#include <asm/barrier.h>
 
 #ifdef CONFIG_SMP
 #error not SMP safe
@@ -29,12 +30,6 @@
  * We do not have SMP systems, so we don't have to deal with that.
  */
 
-/* Atomic operations are already serializing */
-#define smp_mb__before_atomic_dec() barrier()
-#define smp_mb__after_atomic_dec() barrier()
-#define smp_mb__before_atomic_inc() barrier()
-#define smp_mb__after_atomic_inc() barrier()
-
 #define ATOMIC_INIT(i) { (i) }
 #define atomic_read(v) (*(volatile int *)&(v)->counter)
 #define atomic_set(v, i) (((v)->counter) = (i))
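
Note: with <asm/barrier.h> now included, the removed frv-private smp_mb__{before,after}_atomic_{dec,inc}() macros are presumably superseded by the generic smp_mb__before_atomic()/smp_mb__after_atomic() helpers, which on this !SMP architecture should still amount to a compiler barrier, matching the removed definitions. A minimal sketch of the replacement usage under that assumption; the struct and function names below are hypothetical and not part of this diff:

/*
 * Illustrative sketch only: generic barrier standing in for the removed
 * smp_mb__before_atomic_dec(); "example_obj" and example_mark_dead() are
 * hypothetical.
 */
#include <linux/atomic.h>

struct example_obj {
	int dead;
	atomic_t ref_count;
};

static void example_mark_dead(struct example_obj *obj)
{
	obj->dead = 1;
	/* order the store to ->dead before the reference drop */
	smp_mb__before_atomic();
	atomic_dec(&obj->ref_count);
}

Value-returning atomics such as atomic_dec_and_test() already imply full ordering, so the explicit barrier is only needed around the non-returning forms like atomic_dec()/atomic_inc().
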
diff --git a/arch/frv/include/asm/bitops.h b/arch/frv/include/asm/bitops.h
index 57bf85db893f..96de220ef131 100644
--- a/arch/frv/include/asm/bitops.h
+++ b/arch/frv/include/asm/bitops.h
@@ -25,12 +25,6 @@
 
 #include <asm-generic/bitops/ffz.h>
 
-/*
- * clear_bit() doesn't provide any barrier for the compiler.
- */
-#define smp_mb__before_clear_bit() barrier()
-#define smp_mb__after_clear_bit() barrier()
-
 #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
 static inline
 unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
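
Note: the same conversion applies on the bitops side; a call site that used smp_mb__after_clear_bit() after clear_bit() would now use the generic smp_mb__after_atomic() instead. A self-contained sketch under that assumption; every identifier below (flag bit, wait queue, function) is made up for illustration:

/*
 * Illustrative sketch only: generic barrier in place of the removed
 * smp_mb__after_clear_bit(); all identifiers here are hypothetical.
 */
#include <linux/bitops.h>
#include <linux/wait.h>
#include <linux/atomic.h>

#define EXAMPLE_PENDING	0

static unsigned long example_flags;
static DECLARE_WAIT_QUEUE_HEAD(example_wq);

static void example_clear_pending(void)
{
	clear_bit(EXAMPLE_PENDING, &example_flags);
	/* make the cleared bit visible before waking any waiters */
	smp_mb__after_atomic();
	wake_up(&example_wq);
}
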