Diffstat (limited to 'include/asm-i386/atomic.h')
-rw-r--r--	include/asm-i386/atomic.h	36
1 files changed, 18 insertions(+), 18 deletions(-)
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index de649d3aa2d4..22d80ece95cb 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -10,12 +10,6 @@
  * resource counting etc..
  */
 
-#ifdef CONFIG_SMP
-#define LOCK "lock ; "
-#else
-#define LOCK ""
-#endif
-
 /*
  * Make sure gcc doesn't try to be clever and move things around
  * on us. We need to use _exactly_ the address the user gave us,
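
[Note] This hunk drops the file-local LOCK macro; the LOCK_PREFIX used throughout the rest of the patch comes from a shared header instead (in this era, include/asm-i386/alternative.h). From memory, and only as a sketch of the idea rather than the exact definition:

#ifdef CONFIG_SMP
#define LOCK_PREFIX \
		".section .smp_locks,\"a\"\n"	\
		"  .align 4\n"			\
		"  .long 661f\n" /* address of the lock prefix */ \
		".previous\n"			\
		"661:\n\tlock; "
#else
#define LOCK_PREFIX ""
#endif

On CONFIG_SMP kernels this emits the lock prefix and records its address in the .smp_locks section, so a kernel that finds itself booting on a uniprocessor can patch the prefixes to NOPs at runtime; the removed LOCK macro could only make that choice at compile time.
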
@@ -52,7 +46,7 @@ typedef struct { volatile int counter; } atomic_t;
 static __inline__ void atomic_add(int i, atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "addl %1,%0"
+		LOCK_PREFIX "addl %1,%0"
 		:"=m" (v->counter)
 		:"ir" (i), "m" (v->counter));
 }
@@ -67,7 +61,7 @@ static __inline__ void atomic_add(int i, atomic_t *v)
 static __inline__ void atomic_sub(int i, atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "subl %1,%0"
+		LOCK_PREFIX "subl %1,%0"
 		:"=m" (v->counter)
 		:"ir" (i), "m" (v->counter));
 }
@@ -86,7 +80,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "subl %2,%0; sete %1"
+		LOCK_PREFIX "subl %2,%0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"ir" (i), "m" (v->counter) : "memory");
 	return c;
@@ -101,7 +95,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
 static __inline__ void atomic_inc(atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "incl %0"
+		LOCK_PREFIX "incl %0"
 		:"=m" (v->counter)
 		:"m" (v->counter));
 }
@@ -115,7 +109,7 @@ static __inline__ void atomic_inc(atomic_t *v)
 static __inline__ void atomic_dec(atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "decl %0"
+		LOCK_PREFIX "decl %0"
 		:"=m" (v->counter)
 		:"m" (v->counter));
 }
@@ -133,7 +127,7 @@ static __inline__ int atomic_dec_and_test(atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "decl %0; sete %1"
+		LOCK_PREFIX "decl %0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"m" (v->counter) : "memory");
 	return c != 0;
@@ -152,7 +146,7 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "incl %0; sete %1"
+		LOCK_PREFIX "incl %0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"m" (v->counter) : "memory");
 	return c != 0;
@@ -172,7 +166,7 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "addl %2,%0; sets %1"
+		LOCK_PREFIX "addl %2,%0; sets %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"ir" (i), "m" (v->counter) : "memory");
 	return c;
@@ -195,7 +189,7 @@ static __inline__ int atomic_add_return(int i, atomic_t *v)
 	/* Modern 486+ processor */
 	__i = i;
 	__asm__ __volatile__(
-		LOCK "xaddl %0, %1;"
+		LOCK_PREFIX "xaddl %0, %1;"
 		:"=r"(i)
 		:"m"(v->counter), "0"(i));
 	return i + __i;
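
[Note] xaddl exchanges its register operand with the memory operand while adding, so after the instruction the register holding i contains the counter's old value while memory holds the new one; since __i saved the original increment, `return i + __i` yields the counter's new value. A plain-C sketch of the same semantics using a GCC builtin (illustrative only, not what this header compiles to):

static inline int atomic_add_return_sketch(int i, atomic_t *v)
{
	int old = __sync_fetch_and_add(&v->counter, i);	/* atomically: old = counter; counter += i */
	return old + i;					/* the value the counter now holds */
}
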
@@ -231,8 +225,14 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
 ({						\
 	int c, old;				\
 	c = atomic_read(v);			\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+	for (;;) {				\
+		if (unlikely(c == (u)))		\
+			break;			\
+		old = atomic_cmpxchg((v), c, c + (a)); \
+		if (likely(old == c))		\
+			break;			\
 		c = old;			\
+	}					\
 	c != (u);				\
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
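
[Note] The new atomic_add_unless body is the same compare-and-swap loop as the old one-line while, but with both exits annotated: unlikely() for hitting the forbidden value u, likely() for the cmpxchg succeeding on the first try. Unrolled into a function for readability (an illustrative sketch; the real code stays a macro):

static inline int atomic_add_unless_sketch(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (c == u)		/* counter reached the forbidden value: give up */
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (old == c)		/* cmpxchg won the race: the add happened */
			break;
		c = old;		/* lost the race: retry with the fresh value */
	}
	return c != u;			/* nonzero iff the add was performed */
}

atomic_inc_not_zero() is the typical user: increment a reference count only while it is still nonzero, the usual lookup-side refcounting pattern.
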
@@ -242,11 +242,11 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
 
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
-__asm__ __volatile__(LOCK "andl %0,%1" \
+__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
 : : "r" (~(mask)),"m" (*addr) : "memory")
 
 #define atomic_set_mask(mask, addr) \
-__asm__ __volatile__(LOCK "orl %0,%1" \
+__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
 : : "r" (mask),"m" (*(addr)) : "memory")
 
 /* Atomic operations are already serializing on x86 */
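
[Note] Unlike the atomic_t operations above, these two mask macros act on a plain word through a bare pointer. A hypothetical caller, just to show the intent (the variable name is invented):

	unsigned long hw_flags = 0;
	atomic_set_mask(0x4, &hw_flags);	/* lock orl: set bit 2 */
	atomic_clear_mask(0x4, &hw_flags);	/* lock andl with ~mask: clear bit 2 */
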
