Diffstat (limited to 'arch/x86/include/asm/atomic.h')
 -rw-r--r--  arch/x86/include/asm/atomic.h  299
 1 file changed, 297 insertions(+), 2 deletions(-)
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 4e1b8873c474..8f8217b9bdac 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -1,5 +1,300 @@
+#ifndef _ASM_X86_ATOMIC_H
+#define _ASM_X86_ATOMIC_H
+
+#include <linux/compiler.h>
+#include <linux/types.h>
+#include <asm/processor.h>
+#include <asm/alternative.h>
+#include <asm/cmpxchg.h>
+
+/*
+ * Atomic operations that C can't guarantee us.  Useful for
+ * resource counting etc..
+ */
+
+#define ATOMIC_INIT(i)	{ (i) }
+
+/**
+ * atomic_read - read atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically reads the value of @v.
+ */
+static inline int atomic_read(const atomic_t *v)
+{
+	return v->counter;
+}
+
+/**
+ * atomic_set - set atomic variable
+ * @v: pointer of type atomic_t
+ * @i: required value
+ *
+ * Atomically sets the value of @v to @i.
+ */
+static inline void atomic_set(atomic_t *v, int i)
+{
+	v->counter = i;
+}
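
On x86, atomic_read() and atomic_set() compile to a plain aligned load and store, which the ISA already performs atomically; the atomic_t wrapper exists so callers cannot accidentally mix locked and unlocked access to the same counter. A minimal usage sketch (the counter name is illustrative):

	static atomic_t nr_events = ATOMIC_INIT(0);

	static int events_pending(void)
	{
		return atomic_read(&nr_events) != 0;
	}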
+
+/**
+ * atomic_add - add integer to atomic variable
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v.
+ */
+static inline void atomic_add(int i, atomic_t *v)
+{
+	asm volatile(LOCK_PREFIX "addl %1,%0"
+		     : "+m" (v->counter)
+		     : "ir" (i));
+}
+
+/**
+ * atomic_sub - subtract integer from atomic variable
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v.
+ */
+static inline void atomic_sub(int i, atomic_t *v)
+{
+	asm volatile(LOCK_PREFIX "subl %1,%0"
+		     : "+m" (v->counter)
+		     : "ir" (i));
+}
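
LOCK_PREFIX (from asm/alternative.h) emits the lock prefix on SMP builds, making the read-modify-write atomic across CPUs; on !SMP builds it is empty. Neither helper returns a value; use atomic_add_return() below when the result is needed. A sketch of typical use (names are illustrative):

	static atomic_t bytes_in_flight = ATOMIC_INIT(0);

	static void charge(int nbytes)
	{
		atomic_add(nbytes, &bytes_in_flight);
	}

	static void uncharge(int nbytes)
	{
		atomic_sub(nbytes, &bytes_in_flight);
	}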
+
+/**
+ * atomic_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+static inline int atomic_sub_and_test(int i, atomic_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : "ir" (i) : "memory");
+	return c;
+}
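
The sete instruction captures the zero flag produced by the locked subl, so the modification and the test form a single atomic operation; re-checking the counter afterwards with atomic_read() would be racy. A sketch of a caller dropping several references at once (my_obj and my_obj_destroy() are hypothetical):

	static void my_obj_put_many(struct my_obj *obj, int n)
	{
		if (atomic_sub_and_test(n, &obj->refcnt))
			my_obj_destroy(obj);	/* we dropped the last reference */
	}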
+
+/**
+ * atomic_inc - increment atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1.
+ */
+static inline void atomic_inc(atomic_t *v)
+{
+	asm volatile(LOCK_PREFIX "incl %0"
+		     : "+m" (v->counter));
+}
+
+/**
+ * atomic_dec - decrement atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1.
+ */
+static inline void atomic_dec(atomic_t *v)
+{
+	asm volatile(LOCK_PREFIX "decl %0"
+		     : "+m" (v->counter));
+}
+
+/**
+ * atomic_dec_and_test - decrement and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+static inline int atomic_dec_and_test(atomic_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "decl %0; sete %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : : "memory");
+	return c != 0;
+}
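
atomic_inc() and atomic_dec_and_test() together form the classic reference-counting idiom: exactly one caller observes the transition to zero and becomes responsible for cleanup. A sketch (my_obj and my_obj_free() are hypothetical):

	static void my_obj_get(struct my_obj *obj)
	{
		atomic_inc(&obj->refcnt);
	}

	static void my_obj_put(struct my_obj *obj)
	{
		if (atomic_dec_and_test(&obj->refcnt))
			my_obj_free(obj);
	}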
+
+/**
+ * atomic_inc_and_test - increment and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+static inline int atomic_inc_and_test(atomic_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "incl %0; sete %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : : "memory");
+	return c != 0;
+}
+
+/**
+ * atomic_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+static inline int atomic_add_negative(int i, atomic_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : "ir" (i) : "memory");
+	return c;
+}
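
Here sets captures the sign flag, so the caller learns atomically whether the addition took the counter below zero. One way this can be used is as a credit counter that detects underflow; a sketch under that assumption (take_credit() is hypothetical, and the transient negative value is briefly visible to concurrent readers):

	static int take_credit(atomic_t *credits)
	{
		if (atomic_add_negative(-1, credits)) {
			atomic_inc(credits);	/* underflow: undo and fail */
			return 0;
		}
		return 1;
	}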
+
+/**
+ * atomic_add_return - add integer and return
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns @i + @v
+ */
+static inline int atomic_add_return(int i, atomic_t *v)
+{
+	int __i;
+#ifdef CONFIG_M386
+	unsigned long flags;
+	if (unlikely(boot_cpu_data.x86 <= 3))
+		goto no_xadd;
+#endif
+	/* Modern 486+ processor */
+	__i = i;
+	asm volatile(LOCK_PREFIX "xaddl %0, %1"
+		     : "+r" (i), "+m" (v->counter)
+		     : : "memory");
+	return i + __i;
+
+#ifdef CONFIG_M386
+no_xadd: /* Legacy 386 processor */
+	raw_local_irq_save(flags);
+	__i = atomic_read(v);
+	atomic_set(v, i + __i);
+	raw_local_irq_restore(flags);
+	return i + __i;
+#endif
+}
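
xaddl exchanges the register with the memory operand while adding, so after the instruction i holds the counter's old value and i + __i is the new value. The no_xadd path exists because xadd first appeared on the 486; CONFIG_M386 kernels are uniprocessor-only, so disabling interrupts suffices there. Returning the new value makes sequence-number allocation trivial (a sketch; next_seq is illustrative):

	static atomic_t next_seq = ATOMIC_INIT(0);

	static int new_seq(void)
	{
		return atomic_add_return(1, &next_seq);	/* i.e. atomic_inc_return() */
	}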
+
+/**
+ * atomic_sub_return - subtract integer and return
+ * @v: pointer of type atomic_t
+ * @i: integer value to subtract
+ *
+ * Atomically subtracts @i from @v and returns @v - @i
+ */
+static inline int atomic_sub_return(int i, atomic_t *v)
+{
+	return atomic_add_return(-i, v);
+}
+
+#define atomic_inc_return(v)  (atomic_add_return(1, v))
+#define atomic_dec_return(v)  (atomic_sub_return(1, v))
+
+static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+	return cmpxchg(&v->counter, old, new);
+}
+
+static inline int atomic_xchg(atomic_t *v, int new)
+{
+	return xchg(&v->counter, new);
+}
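
atomic_cmpxchg() returns the value that was in @v before the operation; the exchange happened if and only if the return value equals old. That convention is what drives optimistic update loops, as in this sketch of a saturating increment (atomic_inc_saturated() is hypothetical, not part of this header):

	static int atomic_inc_saturated(atomic_t *v, int max)
	{
		int c, old;

		c = atomic_read(v);
		while (c < max) {
			old = atomic_cmpxchg(v, c, c + 1);
			if (old == c)
				break;		/* our update won */
			c = old;		/* lost the race: retry from the new value */
		}
		return c;
	}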
+
+/**
+ * atomic_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as @v was not already @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
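
atomic_inc_not_zero() is the standard way to take a reference to an object whose refcount may already have reached zero, e.g. during a lockless lookup racing with teardown. A sketch (my_obj_tryget() is hypothetical):

	static struct my_obj *my_obj_tryget(struct my_obj *obj)
	{
		if (!atomic_inc_not_zero(&obj->refcnt))
			return NULL;	/* count was 0: object is being destroyed */
		return obj;
	}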
+
+/**
+ * atomic_inc_short - increment of a short integer
+ * @v: pointer of type short int
+ *
+ * Atomically adds 1 to @v.
+ * Returns the new value of @v (re-read non-atomically after the increment).
+ */
+static inline short int atomic_inc_short(short int *v)
+{
+	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
+	return *v;
+}
+
+#ifdef CONFIG_X86_64
+/**
+ * atomic_or_long - OR of two long integers
+ * @v1: pointer to type unsigned long
+ * @v2: value to OR into @v1
+ *
+ * Atomically ORs @v2 into @v1.  Returns nothing.
+ */
+static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
+{
+	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
+}
+#endif
+
+/* These are x86-specific, used by some header files */
+#define atomic_clear_mask(mask, addr)				\
+	asm volatile(LOCK_PREFIX "andl %0,%1"			\
+		     : : "r" (~(mask)), "m" (*(addr)) : "memory")
+
+#define atomic_set_mask(mask, addr)				\
+	asm volatile(LOCK_PREFIX "orl %0,%1"			\
+		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
+		     : "memory")
+
+/* Atomic operations are already serializing on x86 */
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
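
On x86 every locked read-modify-write already acts as a full memory barrier, so these macros only need to stop compiler reordering; portable code must still call them so that weakly ordered architectures emit a real fence. A sketch of the intended pairing (my_obj and its dead flag are hypothetical):

	static void my_obj_retire(struct my_obj *obj)
	{
		obj->dead = 1;			/* must be visible before the drop */
		smp_mb__before_atomic_dec();
		atomic_dec(&obj->refcnt);
	}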
+
 #ifdef CONFIG_X86_32
-# include "atomic_32.h"
+# include "atomic64_32.h"
 #else
-# include "atomic_64.h"
+# include "atomic64_64.h"
 #endif
+
+#include <asm-generic/atomic-long.h>
+#endif /* _ASM_X86_ATOMIC_H */
