Diffstat (limited to 'include/asm-x86_64/atomic.h')
-rw-r--r--	include/asm-x86_64/atomic.h	43
1 file changed, 21 insertions, 22 deletions
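
The whole change is a mechanical rename of the LOCK macro to LOCK_PREFIX, with the definition moving into the newly included <asm/alternative.h>. The motivation is SMP alternatives: recording the address of every lock prefix lets the kernel patch them out at boot on uniprocessor machines, where bus locking is wasted work. A rough sketch of the macro this header now relies on, reconstructed from memory of the 2.6.17-era <asm/alternative.h> (section name, alignment, and label number may differ in the real header):

#ifdef CONFIG_SMP
#define LOCK_PREFIX \
		".section .smp_locks,\"a\"\n"	/* table of lock-prefix sites */ \
		"  .align 8\n" \
		"  .quad 661f\n"		/* record address of the prefix */ \
		".previous\n" \
		"661:\n\tlock; "		/* the prefix itself, patchable at boot */
#else
#define LOCK_PREFIX ""			/* UP build: no bus lock needed */
#endif
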
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index cecbf7baa6a..007e88d6d43 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -1,8 +1,7 @@
 #ifndef __ARCH_X86_64_ATOMIC__
 #define __ARCH_X86_64_ATOMIC__
 
-#include <linux/config.h>
-#include <asm/types.h>
+#include <asm/alternative.h>
 
 /* atomic_t should be 32 bit signed type */
 
@@ -53,7 +52,7 @@ typedef struct { volatile int counter; } atomic_t;
 static __inline__ void atomic_add(int i, atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "addl %1,%0"
+		LOCK_PREFIX "addl %1,%0"
 		:"=m" (v->counter)
 		:"ir" (i), "m" (v->counter));
 }
@@ -68,7 +67,7 @@ static __inline__ void atomic_add(int i, atomic_t *v)
 static __inline__ void atomic_sub(int i, atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "subl %1,%0"
+		LOCK_PREFIX "subl %1,%0"
 		:"=m" (v->counter)
 		:"ir" (i), "m" (v->counter));
 }
@@ -87,7 +86,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "subl %2,%0; sete %1"
+		LOCK_PREFIX "subl %2,%0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"ir" (i), "m" (v->counter) : "memory");
 	return c;
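
Note the pattern in this hunk and the ones that follow: the locked arithmetic instruction and the sete/sets that samples its flags sit in one asm statement, so the flags the test reads are exactly those produced by this CPU's atomic update. A hedged usage sketch (obj, refcnt, and free_object are hypothetical names, not from this header):

	/* Drop i references; only the caller whose subtraction takes the
	 * count to exactly zero sees a true return and does the release. */
	if (atomic_sub_and_test(i, &obj->refcnt))
		free_object(obj);	/* hypothetical release helper */
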
@@ -102,7 +101,7 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
 static __inline__ void atomic_inc(atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "incl %0"
+		LOCK_PREFIX "incl %0"
 		:"=m" (v->counter)
 		:"m" (v->counter));
 }
@@ -116,7 +115,7 @@ static __inline__ void atomic_inc(atomic_t *v)
 static __inline__ void atomic_dec(atomic_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "decl %0"
+		LOCK_PREFIX "decl %0"
 		:"=m" (v->counter)
 		:"m" (v->counter));
 }
@@ -134,7 +133,7 @@ static __inline__ int atomic_dec_and_test(atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "decl %0; sete %1"
+		LOCK_PREFIX "decl %0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"m" (v->counter) : "memory");
 	return c != 0;
@@ -153,7 +152,7 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "incl %0; sete %1"
+		LOCK_PREFIX "incl %0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"m" (v->counter) : "memory");
 	return c != 0;
@@ -173,7 +172,7 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "addl %2,%0; sets %1"
+		LOCK_PREFIX "addl %2,%0; sets %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"ir" (i), "m" (v->counter) : "memory");
 	return c;
@@ -190,7 +189,7 @@ static __inline__ int atomic_add_return(int i, atomic_t *v)
 {
 	int __i = i;
 	__asm__ __volatile__(
-		LOCK "xaddl %0, %1;"
+		LOCK_PREFIX "xaddl %0, %1;"
 		:"=r"(i)
 		:"m"(v->counter), "0"(i));
 	return i + __i;
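
The xaddl here exchanges while adding: the old value of v->counter lands in the register bound to i, so i + __i reconstructs the post-add value without a second read of the counter. A minimal sketch (ATOMIC_INIT comes from this same header):

	atomic_t cnt = ATOMIC_INIT(5);
	int newval = atomic_add_return(3, &cnt);
	/* xaddl leaves the old value 5 in i, so i + __i == 5 + 3;
	 * newval == 8 and cnt.counter == 8. */
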
@@ -238,7 +237,7 @@ typedef struct { volatile long counter; } atomic64_t;
 static __inline__ void atomic64_add(long i, atomic64_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "addq %1,%0"
+		LOCK_PREFIX "addq %1,%0"
 		:"=m" (v->counter)
 		:"ir" (i), "m" (v->counter));
 }
@@ -253,7 +252,7 @@ static __inline__ void atomic64_add(long i, atomic64_t *v)
 static __inline__ void atomic64_sub(long i, atomic64_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "subq %1,%0"
+		LOCK_PREFIX "subq %1,%0"
 		:"=m" (v->counter)
 		:"ir" (i), "m" (v->counter));
 }
@@ -272,7 +271,7 @@ static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "subq %2,%0; sete %1"
+		LOCK_PREFIX "subq %2,%0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"ir" (i), "m" (v->counter) : "memory");
 	return c;
@@ -287,7 +286,7 @@ static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
 static __inline__ void atomic64_inc(atomic64_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "incq %0"
+		LOCK_PREFIX "incq %0"
 		:"=m" (v->counter)
 		:"m" (v->counter));
 }
@@ -301,7 +300,7 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 static __inline__ void atomic64_dec(atomic64_t *v)
 {
 	__asm__ __volatile__(
-		LOCK "decq %0"
+		LOCK_PREFIX "decq %0"
 		:"=m" (v->counter)
 		:"m" (v->counter));
 }
@@ -319,7 +318,7 @@ static __inline__ int atomic64_dec_and_test(atomic64_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "decq %0; sete %1"
+		LOCK_PREFIX "decq %0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"m" (v->counter) : "memory");
 	return c != 0;
@@ -338,7 +337,7 @@ static __inline__ int atomic64_inc_and_test(atomic64_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "incq %0; sete %1"
+		LOCK_PREFIX "incq %0; sete %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"m" (v->counter) : "memory");
 	return c != 0;
@@ -358,7 +357,7 @@ static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		LOCK "addq %2,%0; sets %1"
+		LOCK_PREFIX "addq %2,%0; sets %1"
 		:"=m" (v->counter), "=qm" (c)
 		:"ir" (i), "m" (v->counter) : "memory");
 	return c;
@@ -375,7 +374,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 {
 	long __i = i;
 	__asm__ __volatile__(
-		LOCK "xaddq %0, %1;"
+		LOCK_PREFIX "xaddq %0, %1;"
 		:"=r"(i)
 		:"m"(v->counter), "0"(i));
 	return i + __i;
@@ -419,11 +418,11 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
-__asm__ __volatile__(LOCK "andl %0,%1" \
+__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
 : : "r" (~(mask)),"m" (*addr) : "memory")
 
 #define atomic_set_mask(mask, addr) \
-__asm__ __volatile__(LOCK "orl %0,%1" \
+__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
 : : "r" ((unsigned)mask),"m" (*(addr)) : "memory")
 
 /* Atomic operations are already serializing on x86 */
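
Taken together, the renamed primitives keep their usual reference-counting semantics; only the spelling of the lock prefix changes. A hedged sketch of typical use (struct session and its helpers are hypothetical; kfree assumes <linux/slab.h>):

struct session {
	atomic_t refcnt;		/* starts at ATOMIC_INIT(1) */
};

static void session_get(struct session *s)
{
	atomic_inc(&s->refcnt);
}

static void session_put(struct session *s)
{
	/* atomic_dec_and_test() returns true for exactly one CPU,
	 * the one that drops the counter to zero, so the teardown
	 * runs once. */
	if (atomic_dec_and_test(&s->refcnt))
		kfree(s);
}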