diff options
Diffstat (limited to 'include')
 include/asm-sparc64/atomic.h   |  8 ++++----
 include/asm-sparc64/bitops.h   |  4 ++--
 include/asm-sparc64/spinlock.h |  4 ++--
 include/asm-sparc64/system.h   | 17 ++++++++++-------
 4 files changed, 18 insertions(+), 15 deletions(-)
diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index d80f3379669b..e175afcf2cde 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -72,10 +72,10 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 
 /* Atomic operations are already serializing */
 #ifdef CONFIG_SMP
-#define smp_mb__before_atomic_dec()	membar("#StoreLoad | #LoadLoad")
-#define smp_mb__after_atomic_dec()	membar("#StoreLoad | #StoreStore")
-#define smp_mb__before_atomic_inc()	membar("#StoreLoad | #LoadLoad")
-#define smp_mb__after_atomic_inc()	membar("#StoreLoad | #StoreStore")
+#define smp_mb__before_atomic_dec()	membar_storeload_loadload();
+#define smp_mb__after_atomic_dec()	membar_storeload_storestore();
+#define smp_mb__before_atomic_inc()	membar_storeload_loadload();
+#define smp_mb__after_atomic_inc()	membar_storeload_storestore();
 #else
 #define smp_mb__before_atomic_dec()	barrier()
 #define smp_mb__after_atomic_dec()	barrier()
diff --git a/include/asm-sparc64/bitops.h b/include/asm-sparc64/bitops.h
index 9c5e71970287..6388b8376c50 100644
--- a/include/asm-sparc64/bitops.h
+++ b/include/asm-sparc64/bitops.h
@@ -72,8 +72,8 @@ static inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
 }
 
 #ifdef CONFIG_SMP
-#define smp_mb__before_clear_bit()	membar("#StoreLoad | #LoadLoad")
-#define smp_mb__after_clear_bit()	membar("#StoreLoad | #StoreStore")
+#define smp_mb__before_clear_bit()	membar_storeload_loadload()
+#define smp_mb__after_clear_bit()	membar_storeload_storestore()
 #else
 #define smp_mb__before_clear_bit()	barrier()
 #define smp_mb__after_clear_bit()	barrier()
diff --git a/include/asm-sparc64/spinlock.h b/include/asm-sparc64/spinlock.h
index d265bf6570fe..a02c4370eb42 100644
--- a/include/asm-sparc64/spinlock.h
+++ b/include/asm-sparc64/spinlock.h
@@ -43,7 +43,7 @@ typedef struct {
 #define spin_is_locked(lp)  ((lp)->lock != 0)
 
 #define spin_unlock_wait(lp)	\
-do {	membar("#LoadLoad");	\
+do {	rmb();			\
 } while((lp)->lock)
 
 static inline void _raw_spin_lock(spinlock_t *lock)
@@ -129,7 +129,7 @@ typedef struct {
 #define spin_is_locked(__lock)	((__lock)->lock != 0)
 #define spin_unlock_wait(__lock)	\
 do { \
-	membar("#LoadLoad"); \
+	rmb(); \
 } while((__lock)->lock)
 
 extern void _do_spin_lock(spinlock_t *lock, char *str, unsigned long caller);
diff --git a/include/asm-sparc64/system.h b/include/asm-sparc64/system.h
index ee4bdfc6b88f..5e94c05dc2fc 100644
--- a/include/asm-sparc64/system.h
+++ b/include/asm-sparc64/system.h
@@ -28,6 +28,14 @@ enum sparc_cpu {
 #define ARCH_SUN4C_SUN4 0
 #define ARCH_SUN4 0
 
+extern void mb(void);
+extern void rmb(void);
+extern void wmb(void);
+extern void membar_storeload(void);
+extern void membar_storeload_storestore(void);
+extern void membar_storeload_loadload(void);
+extern void membar_storestore_loadstore(void);
+
 #endif
 
 #define setipl(__new_ipl) \
@@ -78,16 +86,11 @@ enum sparc_cpu {
 
 #define nop() 		__asm__ __volatile__ ("nop")
 
-#define membar(type)	__asm__ __volatile__ ("membar " type : : : "memory")
-#define mb()	\
-	membar("#LoadLoad | #LoadStore | #StoreStore | #StoreLoad")
-#define rmb()	membar("#LoadLoad")
-#define wmb()	membar("#StoreStore")
 #define read_barrier_depends()		do { } while(0)
 #define set_mb(__var, __value) \
-	do { __var = __value; membar("#StoreLoad | #StoreStore"); } while(0)
+	do { __var = __value; membar_storeload_storestore(); } while(0)
 #define set_wmb(__var, __value) \
-	do { __var = __value; membar("#StoreStore"); } while(0)
+	do { __var = __value; wmb(); } while(0)
 
 #ifdef CONFIG_SMP
 #define smp_mb()		mb()
