 arch/x86/include/asm/atomic64_32.h | 160
 arch/x86/include/asm/atomic64_64.h | 224
 arch/x86/include/asm/atomic_32.h   | 152
 arch/x86/include/asm/atomic_64.h   | 217
 4 files changed, 386 insertions(+), 367 deletions(-)
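The commit splits the atomic64_t API out of the per-word-size atomic headers: the 32-bit and 64-bit implementations move into two new atomic64_*.h files, and the old headers shrink to a trailing #include. The dispatch between the two wrappers is unchanged and lives in the pre-existing <asm/atomic.h>; roughly (a simplified sketch, not part of this patch):

	/* <asm/atomic.h>, simplified */
	#ifdef CONFIG_X86_32
	# include "atomic_32.h"	/* now ends with #include <asm/atomic64_32.h> */
	#else
	# include "atomic_64.h"	/* now ends with #include <asm/atomic64_64.h> */
	#endif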
diff --git a/arch/x86/include/asm/atomic64_32.h b/arch/x86/include/asm/atomic64_32.h
new file mode 100644
index 000000000000..03027bf28de5
--- /dev/null
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -0,0 +1,160 @@
+#ifndef _ASM_X86_ATOMIC64_32_H
+#define _ASM_X86_ATOMIC64_32_H
+
+#include <linux/compiler.h>
+#include <linux/types.h>
+#include <asm/processor.h>
+//#include <asm/cmpxchg.h>
+
+/* A 64-bit atomic type */
+
+typedef struct {
+	u64 __aligned(8) counter;
+} atomic64_t;
+
+#define ATOMIC64_INIT(val)	{ (val) }
+
+extern u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val);
+
+/**
+ * atomic64_xchg - xchg atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ * @new_val: value to assign
+ *
+ * Atomically xchgs the value of @ptr to @new_val and returns
+ * the old value.
+ */
+extern u64 atomic64_xchg(atomic64_t *ptr, u64 new_val);
+
+/**
+ * atomic64_set - set atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ * @new_val: value to assign
+ *
+ * Atomically sets the value of @ptr to @new_val.
+ */
+extern void atomic64_set(atomic64_t *ptr, u64 new_val);
+
+/**
+ * atomic64_read - read atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically reads the value of @ptr and returns it.
+ */
+static inline u64 atomic64_read(atomic64_t *ptr)
+{
+	u64 res;
+
+	/*
+	 * Note, we inline this atomic64_t primitive because
+	 * it only clobbers EAX/EDX and leaves the others
+	 * untouched. We also (somewhat subtly) rely on the
+	 * fact that cmpxchg8b returns the current 64-bit value
+	 * of the memory location we are touching:
+	 */
+	asm volatile(
+		"mov %%ebx, %%eax\n\t"
+		"mov %%ecx, %%edx\n\t"
+		LOCK_PREFIX "cmpxchg8b %1\n"
+			: "=&A" (res)
+			: "m" (*ptr)
+		);
+
+	return res;
+}
+
+extern u64 atomic64_read(atomic64_t *ptr);
+
+/**
+ * atomic64_add_return - add and return
+ * @delta: integer value to add
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically adds @delta to @ptr and returns @delta + *@ptr
+ */
+extern u64 atomic64_add_return(u64 delta, atomic64_t *ptr);
+
+/*
+ * Other variants with different arithmetic operators:
+ */
+extern u64 atomic64_sub_return(u64 delta, atomic64_t *ptr);
+extern u64 atomic64_inc_return(atomic64_t *ptr);
+extern u64 atomic64_dec_return(atomic64_t *ptr);
+
+/**
+ * atomic64_add - add integer to atomic64 variable
+ * @delta: integer value to add
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically adds @delta to @ptr.
+ */
+extern void atomic64_add(u64 delta, atomic64_t *ptr);
+
+/**
+ * atomic64_sub - subtract the atomic64 variable
+ * @delta: integer value to subtract
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically subtracts @delta from @ptr.
+ */
+extern void atomic64_sub(u64 delta, atomic64_t *ptr);
+
+/**
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @delta: integer value to subtract
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically subtracts @delta from @ptr and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+extern int atomic64_sub_and_test(u64 delta, atomic64_t *ptr);
+
+/**
+ * atomic64_inc - increment atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically increments @ptr by 1.
+ */
+extern void atomic64_inc(atomic64_t *ptr);
+
+/**
+ * atomic64_dec - decrement atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically decrements @ptr by 1.
+ */
+extern void atomic64_dec(atomic64_t *ptr);
+
+/**
+ * atomic64_dec_and_test - decrement and test
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically decrements @ptr by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+extern int atomic64_dec_and_test(atomic64_t *ptr);
+
+/**
+ * atomic64_inc_and_test - increment and test
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically increments @ptr by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+extern int atomic64_inc_and_test(atomic64_t *ptr);
+
+/**
+ * atomic64_add_negative - add and test if negative
+ * @delta: integer value to add
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically adds @delta to @ptr and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+extern int atomic64_add_negative(u64 delta, atomic64_t *ptr);
+
+#endif /* _ASM_X86_ATOMIC64_32_H */
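The 32-bit header keeps only atomic64_read() inline (one cmpxchg8b, clobbering just EAX/EDX); everything else is declared extern and implemented out of line. A hedged usage sketch, assuming ordinary driver code; the counter and function names (nr_events, record_event, snapshot_events) are illustrative and not from the patch:

	#include <asm/atomic.h>	/* pulls in asm/atomic64_32.h on 32-bit x86 */

	static atomic64_t nr_events = ATOMIC64_INIT(0);

	static void record_event(void)
	{
		atomic64_inc(&nr_events);	/* out-of-line LOCK'ed read-modify-write */
	}

	static u64 snapshot_events(void)
	{
		/* A single cmpxchg8b reads all 64 bits at once, so the
		 * value cannot tear even though the CPU is 32-bit. */
		return atomic64_read(&nr_events);
	}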
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
new file mode 100644
index 000000000000..51c5b4056929
--- /dev/null
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -0,0 +1,224 @@
+#ifndef _ASM_X86_ATOMIC64_64_H
+#define _ASM_X86_ATOMIC64_64_H
+
+#include <linux/types.h>
+#include <asm/alternative.h>
+#include <asm/cmpxchg.h>
+
+/* The 64-bit atomic type */
+
+#define ATOMIC64_INIT(i)	{ (i) }
+
+/**
+ * atomic64_read - read atomic64 variable
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically reads the value of @v.
+ * Doesn't imply a read memory barrier.
+ */
+static inline long atomic64_read(const atomic64_t *v)
+{
+	return v->counter;
+}
+
+/**
+ * atomic64_set - set atomic64 variable
+ * @v: pointer to type atomic64_t
+ * @i: required value
+ *
+ * Atomically sets the value of @v to @i.
+ */
+static inline void atomic64_set(atomic64_t *v, long i)
+{
+	v->counter = i;
+}
+
+/**
+ * atomic64_add - add integer to atomic64 variable
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically adds @i to @v.
+ */
+static inline void atomic64_add(long i, atomic64_t *v)
+{
+	asm volatile(LOCK_PREFIX "addq %1,%0"
+		     : "=m" (v->counter)
+		     : "er" (i), "m" (v->counter));
+}
+
+/**
+ * atomic64_sub - subtract the atomic64 variable
+ * @i: integer value to subtract
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically subtracts @i from @v.
+ */
+static inline void atomic64_sub(long i, atomic64_t *v)
+{
+	asm volatile(LOCK_PREFIX "subq %1,%0"
+		     : "=m" (v->counter)
+		     : "er" (i), "m" (v->counter));
+}
+
+/**
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+static inline int atomic64_sub_and_test(long i, atomic64_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
+		     : "=m" (v->counter), "=qm" (c)
+		     : "er" (i), "m" (v->counter) : "memory");
+	return c;
+}
+
+/**
+ * atomic64_inc - increment atomic64 variable
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically increments @v by 1.
+ */
+static inline void atomic64_inc(atomic64_t *v)
+{
+	asm volatile(LOCK_PREFIX "incq %0"
+		     : "=m" (v->counter)
+		     : "m" (v->counter));
+}
+
+/**
+ * atomic64_dec - decrement atomic64 variable
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically decrements @v by 1.
+ */
+static inline void atomic64_dec(atomic64_t *v)
+{
+	asm volatile(LOCK_PREFIX "decq %0"
+		     : "=m" (v->counter)
+		     : "m" (v->counter));
+}
+
+/**
+ * atomic64_dec_and_test - decrement and test
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+static inline int atomic64_dec_and_test(atomic64_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "decq %0; sete %1"
+		     : "=m" (v->counter), "=qm" (c)
+		     : "m" (v->counter) : "memory");
+	return c != 0;
+}
+
+/**
+ * atomic64_inc_and_test - increment and test
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+static inline int atomic64_inc_and_test(atomic64_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "incq %0; sete %1"
+		     : "=m" (v->counter), "=qm" (c)
+		     : "m" (v->counter) : "memory");
+	return c != 0;
+}
+
+/**
+ * atomic64_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+static inline int atomic64_add_negative(long i, atomic64_t *v)
+{
+	unsigned char c;
+
+	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
+		     : "=m" (v->counter), "=qm" (c)
+		     : "er" (i), "m" (v->counter) : "memory");
+	return c;
+}
+
+/**
+ * atomic64_add_return - add and return
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
+ *
+ * Atomically adds @i to @v and returns @i + @v
+ */
+static inline long atomic64_add_return(long i, atomic64_t *v)
+{
+	long __i = i;
+	asm volatile(LOCK_PREFIX "xaddq %0, %1;"
+		     : "+r" (i), "+m" (v->counter)
+		     : : "memory");
+	return i + __i;
+}
+
+static inline long atomic64_sub_return(long i, atomic64_t *v)
+{
+	return atomic64_add_return(-i, v);
+}
+
+#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
+#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
+
+static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
+{
+	return cmpxchg(&v->counter, old, new);
+}
+
+static inline long atomic64_xchg(atomic64_t *v, long new)
+{
+	return xchg(&v->counter, new);
+}
+
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
+#endif /* _ASM_X86_ATOMIC64_64_H */
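The atomic64_add_unless() loop above is the classic compare-and-swap retry pattern: re-read the counter whenever the cmpxchg loses a race, and stop once the observed value equals @u. A hedged usage sketch built only on operations declared in this header; the struct, function names, and kfree() release path are illustrative, not part of the patch:

	#include <linux/types.h>
	#include <linux/slab.h>
	#include <asm/atomic.h>

	struct obj {
		atomic64_t refcount;
	};

	static bool obj_tryget(struct obj *o)
	{
		/* Takes a reference only while the object is still live:
		 * atomic64_inc_not_zero() is atomic64_add_unless(v, 1, 0),
		 * so it fails once a release has dropped refcount to zero. */
		return atomic64_inc_not_zero(&o->refcount);
	}

	static void obj_put(struct obj *o)
	{
		if (atomic64_dec_and_test(&o->refcount))
			kfree(o);	/* assumed release path */
	}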
diff --git a/arch/x86/include/asm/atomic_32.h b/arch/x86/include/asm/atomic_32.h
index dc5a667ff791..e128ae988cc9 100644
--- a/arch/x86/include/asm/atomic_32.h
+++ b/arch/x86/include/asm/atomic_32.h
@@ -260,156 +260,6 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc()	barrier()
 #define smp_mb__after_atomic_inc()	barrier()
 
-/* An 64bit atomic type */
-
-typedef struct {
-	u64 __aligned(8) counter;
-} atomic64_t;
-
-#define ATOMIC64_INIT(val)	{ (val) }
-
-extern u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val);
-
-/**
- * atomic64_xchg - xchg atomic64 variable
- * @ptr: pointer to type atomic64_t
- * @new_val: value to assign
- *
- * Atomically xchgs the value of @ptr to @new_val and returns
- * the old value.
- */
-extern u64 atomic64_xchg(atomic64_t *ptr, u64 new_val);
-
-/**
- * atomic64_set - set atomic64 variable
- * @ptr: pointer to type atomic64_t
- * @new_val: value to assign
- *
- * Atomically sets the value of @ptr to @new_val.
- */
-extern void atomic64_set(atomic64_t *ptr, u64 new_val);
-
-/**
- * atomic64_read - read atomic64 variable
- * @ptr: pointer to type atomic64_t
- *
- * Atomically reads the value of @ptr and returns it.
- */
-static inline u64 atomic64_read(atomic64_t *ptr)
-{
-	u64 res;
-
-	/*
-	 * Note, we inline this atomic64_t primitive because
-	 * it only clobbers EAX/EDX and leaves the others
-	 * untouched. We also (somewhat subtly) rely on the
-	 * fact that cmpxchg8b returns the current 64-bit value
-	 * of the memory location we are touching:
-	 */
-	asm volatile(
-		"mov %%ebx, %%eax\n\t"
-		"mov %%ecx, %%edx\n\t"
-		LOCK_PREFIX "cmpxchg8b %1\n"
-			: "=&A" (res)
-			: "m" (*ptr)
-		);
-
-	return res;
-}
-
-extern u64 atomic64_read(atomic64_t *ptr);
-
-/**
- * atomic64_add_return - add and return
- * @delta: integer value to add
- * @ptr: pointer to type atomic64_t
- *
- * Atomically adds @delta to @ptr and returns @delta + *@ptr
- */
-extern u64 atomic64_add_return(u64 delta, atomic64_t *ptr);
-
-/*
- * Other variants with different arithmetic operators:
- */
-extern u64 atomic64_sub_return(u64 delta, atomic64_t *ptr);
-extern u64 atomic64_inc_return(atomic64_t *ptr);
-extern u64 atomic64_dec_return(atomic64_t *ptr);
-
-/**
- * atomic64_add - add integer to atomic64 variable
- * @delta: integer value to add
- * @ptr: pointer to type atomic64_t
- *
- * Atomically adds @delta to @ptr.
- */
-extern void atomic64_add(u64 delta, atomic64_t *ptr);
-
-/**
- * atomic64_sub - subtract the atomic64 variable
- * @delta: integer value to subtract
- * @ptr: pointer to type atomic64_t
- *
- * Atomically subtracts @delta from @ptr.
- */
-extern void atomic64_sub(u64 delta, atomic64_t *ptr);
-
-/**
- * atomic64_sub_and_test - subtract value from variable and test result
- * @delta: integer value to subtract
- * @ptr: pointer to type atomic64_t
- *
- * Atomically subtracts @delta from @ptr and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-extern int atomic64_sub_and_test(u64 delta, atomic64_t *ptr);
-
-/**
- * atomic64_inc - increment atomic64 variable
- * @ptr: pointer to type atomic64_t
- *
- * Atomically increments @ptr by 1.
- */
-extern void atomic64_inc(atomic64_t *ptr);
-
-/**
- * atomic64_dec - decrement atomic64 variable
- * @ptr: pointer to type atomic64_t
- *
- * Atomically decrements @ptr by 1.
- */
-extern void atomic64_dec(atomic64_t *ptr);
-
-/**
- * atomic64_dec_and_test - decrement and test
- * @ptr: pointer to type atomic64_t
- *
- * Atomically decrements @ptr by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-extern int atomic64_dec_and_test(atomic64_t *ptr);
-
-/**
- * atomic64_inc_and_test - increment and test
- * @ptr: pointer to type atomic64_t
- *
- * Atomically increments @ptr by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-extern int atomic64_inc_and_test(atomic64_t *ptr);
-
-/**
- * atomic64_add_negative - add and test if negative
- * @delta: integer value to add
- * @ptr: pointer to type atomic64_t
- *
- * Atomically adds @delta to @ptr and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
-extern int atomic64_add_negative(u64 delta, atomic64_t *ptr);
-
+#include <asm/atomic64_32.h>
 #include <asm-generic/atomic-long.h>
 #endif /* _ASM_X86_ATOMIC_32_H */
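For 32-bit builds this hunk is pure code motion: every declaration deleted here reappears in the new <asm/atomic64_32.h> above, so the single #include that replaces the block preserves the API exactly.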
diff --git a/arch/x86/include/asm/atomic_64.h b/arch/x86/include/asm/atomic_64.h
index d605dc268e79..042c33100c69 100644
--- a/arch/x86/include/asm/atomic_64.h
+++ b/arch/x86/include/asm/atomic_64.h
@@ -187,196 +187,6 @@ static inline int atomic_sub_return(int i, atomic_t *v)
 #define atomic_inc_return(v)  (atomic_add_return(1, v))
 #define atomic_dec_return(v)  (atomic_sub_return(1, v))
 
-/* The 64-bit atomic type */
-
-#define ATOMIC64_INIT(i)	{ (i) }
-
-/**
- * atomic64_read - read atomic64 variable
- * @v: pointer of type atomic64_t
- *
- * Atomically reads the value of @v.
- * Doesn't imply a read memory barrier.
- */
-static inline long atomic64_read(const atomic64_t *v)
-{
-	return v->counter;
-}
-
-/**
- * atomic64_set - set atomic64 variable
- * @v: pointer to type atomic64_t
- * @i: required value
- *
- * Atomically sets the value of @v to @i.
- */
-static inline void atomic64_set(atomic64_t *v, long i)
-{
-	v->counter = i;
-}
-
-/**
- * atomic64_add - add integer to atomic64 variable
- * @i: integer value to add
- * @v: pointer to type atomic64_t
- *
- * Atomically adds @i to @v.
- */
-static inline void atomic64_add(long i, atomic64_t *v)
-{
-	asm volatile(LOCK_PREFIX "addq %1,%0"
-		     : "=m" (v->counter)
-		     : "er" (i), "m" (v->counter));
-}
-
-/**
- * atomic64_sub - subtract the atomic64 variable
- * @i: integer value to subtract
- * @v: pointer to type atomic64_t
- *
- * Atomically subtracts @i from @v.
- */
-static inline void atomic64_sub(long i, atomic64_t *v)
-{
-	asm volatile(LOCK_PREFIX "subq %1,%0"
-		     : "=m" (v->counter)
-		     : "er" (i), "m" (v->counter));
-}
-
-/**
- * atomic64_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer to type atomic64_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-static inline int atomic64_sub_and_test(long i, atomic64_t *v)
-{
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
-}
-
-/**
- * atomic64_inc - increment atomic64 variable
- * @v: pointer to type atomic64_t
- *
- * Atomically increments @v by 1.
- */
-static inline void atomic64_inc(atomic64_t *v)
-{
-	asm volatile(LOCK_PREFIX "incq %0"
-		     : "=m" (v->counter)
-		     : "m" (v->counter));
-}
-
-/**
- * atomic64_dec - decrement atomic64 variable
- * @v: pointer to type atomic64_t
- *
- * Atomically decrements @v by 1.
- */
-static inline void atomic64_dec(atomic64_t *v)
-{
-	asm volatile(LOCK_PREFIX "decq %0"
-		     : "=m" (v->counter)
-		     : "m" (v->counter));
-}
-
-/**
- * atomic64_dec_and_test - decrement and test
- * @v: pointer to type atomic64_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-static inline int atomic64_dec_and_test(atomic64_t *v)
-{
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "decq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
-}
-
-/**
- * atomic64_inc_and_test - increment and test
- * @v: pointer to type atomic64_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-static inline int atomic64_inc_and_test(atomic64_t *v)
-{
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "incq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
-}
-
-/**
- * atomic64_add_negative - add and test if negative
- * @i: integer value to add
- * @v: pointer to type atomic64_t
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
-static inline int atomic64_add_negative(long i, atomic64_t *v)
-{
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
-}
-
-/**
- * atomic64_add_return - add and return
- * @i: integer value to add
- * @v: pointer to type atomic64_t
- *
- * Atomically adds @i to @v and returns @i + @v
- */
-static inline long atomic64_add_return(long i, atomic64_t *v)
-{
-	long __i = i;
-	asm volatile(LOCK_PREFIX "xaddq %0, %1;"
-		     : "+r" (i), "+m" (v->counter)
-		     : : "memory");
-	return i + __i;
-}
-
-static inline long atomic64_sub_return(long i, atomic64_t *v)
-{
-	return atomic64_add_return(-i, v);
-}
-
-#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
-#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
-
-static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
-{
-	return cmpxchg(&v->counter, old, new);
-}
-
-static inline long atomic64_xchg(atomic64_t *v, long new)
-{
-	return xchg(&v->counter, new);
-}
-
 static inline long atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	return cmpxchg(&v->counter, old, new);
@@ -414,30 +224,6 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 /**
- * atomic64_add_unless - add unless the number is a given value
- * @v: pointer of type atomic64_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
- */
-static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	long c, old;
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic64_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != (u);
-}
-
-/**
  * atomic_inc_short - increment of a short integer
  * @v: pointer to type int
  *
@@ -463,8 +249,6 @@ static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
 	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr)					\
 	asm volatile(LOCK_PREFIX "andl %0,%1"				\
@@ -481,5 +265,6 @@ static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
 #define smp_mb__before_atomic_inc()	barrier()
 #define smp_mb__after_atomic_inc()	barrier()
 
+#include <asm/atomic64_64.h>
 #include <asm-generic/atomic-long.h>
 #endif /* _ASM_X86_ATOMIC_64_H */
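The 64-bit side is the same code-motion pattern, with one structural gain: the new <asm/atomic64_64.h> pulls in <linux/types.h>, <asm/alternative.h> and <asm/cmpxchg.h> itself, so it stands alone rather than relying on whatever atomic_64.h happened to include before the inline block.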
