about | summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
author    Jan Beulich <JBeulich@suse.com>        2018-02-26 06:11:51 -0500
committer Thomas Gleixner <tglx@linutronix.de>  2018-02-28 09:18:41 -0500
commit    22636f8c9511245cb3c8412039f1dd95afb3aa59 (patch)
tree      3685d0de50afaf363a466c05f421e7955da8c1f9
parent    a368d7fd2a3c6babb852fe974018dd97916bcd3b (diff)
x86/asm: Add instruction suffixes to bitops
Omitting suffixes from instructions in AT&T mode is bad practice when operand
size cannot be determined by the assembler from register operands, and is
likely going to be warned about by upstream gas in the future (mine does
already). Add the missing suffixes here.

Note that for 64-bit this means some operations change from being 32-bit
to 64-bit.

Signed-off-by: Jan Beulich <jbeulich@suse.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Link: https://lkml.kernel.org/r/5A93F98702000078001ABACC@prv-mh.provo.novell.com
-rw-r--r--  arch/x86/include/asm/bitops.h | 29
-rw-r--r--  arch/x86/include/asm/percpu.h |  2
2 files changed, 17 insertions(+), 14 deletions(-)
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 3fa039855b8f..9f645ba57dbb 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -78,7 +78,7 @@ set_bit(long nr, volatile unsigned long *addr)
 			: "iq" ((u8)CONST_MASK(nr))
 			: "memory");
 	} else {
-		asm volatile(LOCK_PREFIX "bts %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
 			: BITOP_ADDR(addr) : "Ir" (nr) : "memory");
 	}
 }
@@ -94,7 +94,7 @@ set_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline void __set_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
+	asm volatile(__ASM_SIZE(bts) " %1,%0" : ADDR : "Ir" (nr) : "memory");
 }
 
 /**
@@ -115,7 +115,7 @@ clear_bit(long nr, volatile unsigned long *addr)
 			: CONST_MASK_ADDR(nr, addr)
 			: "iq" ((u8)~CONST_MASK(nr)));
 	} else {
-		asm volatile(LOCK_PREFIX "btr %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
 			: BITOP_ADDR(addr)
 			: "Ir" (nr));
 	}
@@ -137,7 +137,7 @@ static __always_inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
 
 static __always_inline void __clear_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("btr %1,%0" : ADDR : "Ir" (nr));
+	asm volatile(__ASM_SIZE(btr) " %1,%0" : ADDR : "Ir" (nr));
 }
 
 static __always_inline bool clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
@@ -182,7 +182,7 @@ static __always_inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
  */
 static __always_inline void __change_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("btc %1,%0" : ADDR : "Ir" (nr));
+	asm volatile(__ASM_SIZE(btc) " %1,%0" : ADDR : "Ir" (nr));
 }
 
 /**
@@ -201,7 +201,7 @@ static __always_inline void change_bit(long nr, volatile unsigned long *addr)
 			: CONST_MASK_ADDR(nr, addr)
 			: "iq" ((u8)CONST_MASK(nr)));
 	} else {
-		asm volatile(LOCK_PREFIX "btc %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
 			: BITOP_ADDR(addr)
 			: "Ir" (nr));
 	}
@@ -217,7 +217,8 @@ static __always_inline void change_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
+			 *addr, "Ir", nr, "%0", c);
 }
 
 /**
@@ -246,7 +247,7 @@ static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
 {
 	bool oldbit;
 
-	asm("bts %2,%1"
+	asm(__ASM_SIZE(bts) " %2,%1"
 	    CC_SET(c)
 	    : CC_OUT(c) (oldbit), ADDR
 	    : "Ir" (nr));
@@ -263,7 +264,8 @@ static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
+			 *addr, "Ir", nr, "%0", c);
 }
 
 /**
@@ -286,7 +288,7 @@ static __always_inline bool __test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
 	bool oldbit;
 
-	asm volatile("btr %2,%1"
+	asm volatile(__ASM_SIZE(btr) " %2,%1"
 		     CC_SET(c)
 		     : CC_OUT(c) (oldbit), ADDR
 		     : "Ir" (nr));
@@ -298,7 +300,7 @@ static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
 {
 	bool oldbit;
 
-	asm volatile("btc %2,%1"
+	asm volatile(__ASM_SIZE(btc) " %2,%1"
 		     CC_SET(c)
 		     : CC_OUT(c) (oldbit), ADDR
 		     : "Ir" (nr) : "memory");
@@ -316,7 +318,8 @@ static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
+			 *addr, "Ir", nr, "%0", c);
 }
 
 static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
@@ -329,7 +332,7 @@ static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
 {
 	bool oldbit;
 
-	asm volatile("bt %2,%1"
+	asm volatile(__ASM_SIZE(bt) " %2,%1"
 		     CC_SET(c)
 		     : CC_OUT(c) (oldbit)
 		     : "m" (*(unsigned long *)addr), "Ir" (nr));
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
index ba3c523aaf16..a06b07399d17 100644
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -526,7 +526,7 @@ static inline bool x86_this_cpu_variable_test_bit(int nr,
 {
 	bool oldbit;
 
-	asm volatile("bt "__percpu_arg(2)",%1"
+	asm volatile("btl "__percpu_arg(2)",%1"
 		     CC_SET(c)
 		     : CC_OUT(c) (oldbit)
 		     : "m" (*(unsigned long __percpu *)addr), "Ir" (nr));