Diffstat (limited to 'arch/x86/include/asm/atomic64_64.h')
-rw-r--r--	arch/x86/include/asm/atomic64_64.h	28
1 file changed, 4 insertions(+), 24 deletions(-)
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 0e1cbfc8ee06..3f065c985aee 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -72,12 +72,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
  */
 static inline int atomic64_sub_and_test(long i, atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, i, "%0", "e");
 }
 
 /**
@@ -116,12 +111,7 @@ static inline void atomic64_dec(atomic64_t *v)
  */
 static inline int atomic64_dec_and_test(atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "decq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e");
 }
 
 /**
@@ -134,12 +124,7 @@ static inline int atomic64_dec_and_test(atomic64_t *v)
  */
 static inline int atomic64_inc_and_test(atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "incq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e");
 }
 
 /**
@@ -153,12 +138,7 @@ static inline int atomic64_inc_and_test(atomic64_t *v)
  */
 static inline int atomic64_add_negative(long i, atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, i, "%0", "s");
 }
 
 /**
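
The GEN_UNARY_RMWcc()/GEN_BINARY_RMWcc() helpers used above come from
arch/x86/include/asm/rmwcc.h: on a compiler with asm goto they let the
caller branch directly on the condition code left by the locked instruction,
instead of materializing it into a register with sete/sets and testing it
again. The following is a minimal userspace sketch of that idea, not the
kernel's actual macro; the names RMWcc_UNARY and my_atomic64_dec_and_test
are made up for illustration, and it assumes GCC-style asm goto on x86-64.

    /*
     * Sketch of the RMWcc idea: do a locked read-modify-write on memory and
     * let the compiler branch straight on the resulting condition code via
     * asm goto. The label has function scope, so expand at most once per
     * function in this simplified form.
     */
    #include <stdio.h>

    typedef struct { long counter; } atomic64_t;

    /* Evaluate to 1 if condition code "cc" is set after the locked "op". */
    #define RMWcc_UNARY(op, var, cc)                                \
    ({                                                              \
            int __ret = 0;                                          \
            asm goto("lock " op " %0\n\t"                           \
                     "j" cc " %l[cc_label]"                         \
                     : /* no outputs with asm goto */               \
                     : "m" (var)                                    \
                     : "memory", "cc"                               \
                     : cc_label);                                   \
            if (0) {                                                \
    cc_label:       __ret = 1;                                      \
            }                                                       \
            __ret;                                                  \
    })

    /* Illustrative stand-in for atomic64_dec_and_test(): true on reaching 0. */
    static inline int my_atomic64_dec_and_test(atomic64_t *v)
    {
            return RMWcc_UNARY("decq", v->counter, "e");    /* "e": ZF set */
    }

    int main(void)
    {
            atomic64_t refs = { .counter = 2 };

            printf("%d\n", my_atomic64_dec_and_test(&refs));  /* 0 (2 -> 1) */
            printf("%d\n", my_atomic64_dec_and_test(&refs));  /* 1 (1 -> 0) */
            return 0;
    }

Built with something like gcc -O2, the decrement-and-test should reduce to a
single lock decq followed by a je, which is the code improvement this
conversion from open-coded asm volatile + sete/sets is after.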