 arch/mips/include/asm/atomic.h  |  9 ---------
 arch/mips/include/asm/barrier.h |  3 +++
 arch/mips/include/asm/bitops.h  | 11 ++---------
 arch/mips/kernel/irq.c          |  4 ++--
 4 files changed, 7 insertions(+), 20 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index e8eb3d53a241..37b2befe651a 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -761,13 +761,4 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #endif /* CONFIG_64BIT */
 
-/*
- * atomic*_return operations are serializing but not the non-*_return
- * versions.
- */
-#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
-#define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
-#define smp_mb__after_atomic_inc()	smp_llsc_mb()
-
 #endif /* _ASM_ATOMIC_H */
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index e1aa4e4c2984..d0101dd0575e 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -195,4 +195,7 @@ do { \
 	___p1;								\
 })
 
+#define smp_mb__before_atomic()	smp_mb__before_llsc()
+#define smp_mb__after_atomic()	smp_llsc_mb()
+
 #endif /* __ASM_BARRIER_H */
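
The two helpers added above are the MIPS definitions of the generic smp_mb__before_atomic()/smp_mb__after_atomic() pair that replaces the operation-specific *_atomic_dec/_inc and *_clear_bit barriers removed elsewhere in this patch. A minimal usage sketch follows; the counter name pending_refs and the function drop_ref are illustrative only, not part of this patch:

	#include <linux/atomic.h>

	static atomic_t pending_refs = ATOMIC_INIT(0);

	/*
	 * atomic_dec() does not return a value, so on MIPS it is not
	 * serializing by itself; callers that need ordering bracket it
	 * with the generic barrier helpers.
	 */
	static void drop_ref(void)
	{
		smp_mb__before_atomic();	/* order earlier stores before the dec */
		atomic_dec(&pending_refs);
		smp_mb__after_atomic();		/* order the dec before later accesses */
	}

On MIPS both helpers expand to the same smp_mb__before_llsc()/smp_llsc_mb() primitives the removed macros used, so the generated code does not change, only the spelling at the call sites.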
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 6a65d49e2c0d..7c8816f7b7c4 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -38,13 +38,6 @@
 #endif
 
 /*
- * clear_bit() doesn't provide any barrier for the compiler.
- */
-#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
-#define smp_mb__after_clear_bit()	smp_llsc_mb()
-
-
-/*
  * These are the "slower" versions of the functions and are in bitops.c.
  * These functions call raw_local_irq_{save,restore}().
  */
@@ -120,7 +113,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
  *
  * clear_bit() is atomic and may not be reordered.  However, it does
  * not contain a memory barrier, so if it is used for locking purposes,
- * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
+ * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
  * in order to ensure changes are visible on other processors.
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
@@ -175,7 +168,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
 {
-	smp_mb__before_clear_bit();
+	smp_mb__before_atomic();
 	clear_bit(nr, addr);
 }
 
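
The reworded comment above keeps the same guidance under the new names: clear_bit() is atomic but carries no memory barrier, so lock-style users add the ordering themselves (clear_bit_unlock(), shown in the hunk above, folds in the leading barrier). A hedged sketch of that pattern; the flag word state_flags and bit number BUSY_BIT are illustrative, not taken from this patch:

	#include <linux/bitops.h>

	#define BUSY_BIT	0			/* illustrative bit number */

	static unsigned long state_flags;		/* illustrative flag word */

	static void release_busy(void)
	{
		/*
		 * Publish earlier writes before the flag is cleared, then
		 * make the cleared flag visible before later accesses.
		 */
		smp_mb__before_atomic();
		clear_bit(BUSY_BIT, &state_flags);
		smp_mb__after_atomic();
	}

The free_irqno() change in arch/mips/kernel/irq.c below is an in-tree instance of exactly this bracketing.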
diff --git a/arch/mips/kernel/irq.c b/arch/mips/kernel/irq.c
index d1fea7a054be..1818da4dbb85 100644
--- a/arch/mips/kernel/irq.c
+++ b/arch/mips/kernel/irq.c
@@ -62,9 +62,9 @@ void __init alloc_legacy_irqno(void)
 
 void free_irqno(unsigned int irq)
 {
-	smp_mb__before_clear_bit();
+	smp_mb__before_atomic();
 	clear_bit(irq, irq_map);
-	smp_mb__after_clear_bit();
+	smp_mb__after_atomic();
 }
 
 /*