about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorJan Beulich <JBeulich@suse.com>2019-03-27 11:15:19 -0400
committerIngo Molnar <mingo@kernel.org>2019-04-10 03:53:31 -0400
commit547571b5abe61bb33c6005d8981e86e3c61fedcc (patch)
treef06947ea9413a6a1a0251119379b3b2219cb1a0a
parent28e3ace70c3d2ea47a62dffe046011d1b74ee839 (diff)
x86/asm: Modernize sync_bitops.h
Add missing instruction suffixes and use rmwcc.h just like was (more or less)
recently done for bitops.h as well, see:

  22636f8c9511: x86/asm: Add instruction suffixes to bitops
  288e4521f0f6: x86/asm: 'Simplify' GEN_*_RMWcc() macros

No change in functionality intended.

Signed-off-by: Jan Beulich <jbeulich@suse.com>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Boris Ostrovsky <boris.ostrovsky@oracle.com>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Juergen Gross <jgross@suse.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: http://lkml.kernel.org/r/5C9B93870200007800222289@prv1-mh.provo.novell.com
[ Cleaned up the changelog a bit. ]
Signed-off-by: Ingo Molnar <mingo@kernel.org>
-rw-r--r--arch/x86/include/asm/sync_bitops.h31
1 file changed, 9 insertions, 22 deletions
diff --git a/arch/x86/include/asm/sync_bitops.h b/arch/x86/include/asm/sync_bitops.h
index 2fe745356fb1..6d8d6bc183b7 100644
--- a/arch/x86/include/asm/sync_bitops.h
+++ b/arch/x86/include/asm/sync_bitops.h
@@ -14,6 +14,8 @@
14 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1). 14 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
15 */ 15 */
16 16
17#include <asm/rmwcc.h>
18
17#define ADDR (*(volatile long *)addr) 19#define ADDR (*(volatile long *)addr)
18 20
19/** 21/**
@@ -29,7 +31,7 @@
29 */ 31 */
30static inline void sync_set_bit(long nr, volatile unsigned long *addr) 32static inline void sync_set_bit(long nr, volatile unsigned long *addr)
31{ 33{
32 asm volatile("lock; bts %1,%0" 34 asm volatile("lock; " __ASM_SIZE(bts) " %1,%0"
33 : "+m" (ADDR) 35 : "+m" (ADDR)
34 : "Ir" (nr) 36 : "Ir" (nr)
35 : "memory"); 37 : "memory");
@@ -47,7 +49,7 @@ static inline void sync_set_bit(long nr, volatile unsigned long *addr)
47 */ 49 */
48static inline void sync_clear_bit(long nr, volatile unsigned long *addr) 50static inline void sync_clear_bit(long nr, volatile unsigned long *addr)
49{ 51{
50 asm volatile("lock; btr %1,%0" 52 asm volatile("lock; " __ASM_SIZE(btr) " %1,%0"
51 : "+m" (ADDR) 53 : "+m" (ADDR)
52 : "Ir" (nr) 54 : "Ir" (nr)
53 : "memory"); 55 : "memory");
@@ -64,7 +66,7 @@ static inline void sync_clear_bit(long nr, volatile unsigned long *addr)
64 */ 66 */
65static inline void sync_change_bit(long nr, volatile unsigned long *addr) 67static inline void sync_change_bit(long nr, volatile unsigned long *addr)
66{ 68{
67 asm volatile("lock; btc %1,%0" 69 asm volatile("lock; " __ASM_SIZE(btc) " %1,%0"
68 : "+m" (ADDR) 70 : "+m" (ADDR)
69 : "Ir" (nr) 71 : "Ir" (nr)
70 : "memory"); 72 : "memory");
@@ -78,14 +80,9 @@ static inline void sync_change_bit(long nr, volatile unsigned long *addr)
78 * This operation is atomic and cannot be reordered. 80 * This operation is atomic and cannot be reordered.
79 * It also implies a memory barrier. 81 * It also implies a memory barrier.
80 */ 82 */
81static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr) 83static inline bool sync_test_and_set_bit(long nr, volatile unsigned long *addr)
82{ 84{
83 unsigned char oldbit; 85 return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(bts), *addr, c, "Ir", nr);
84
85 asm volatile("lock; bts %2,%1\n\tsetc %0"
86 : "=qm" (oldbit), "+m" (ADDR)
87 : "Ir" (nr) : "memory");
88 return oldbit;
89} 86}
90 87
91/** 88/**
@@ -98,12 +95,7 @@ static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
98 */ 95 */
99static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr) 96static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
100{ 97{
101 unsigned char oldbit; 98 return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btr), *addr, c, "Ir", nr);
102
103 asm volatile("lock; btr %2,%1\n\tsetc %0"
104 : "=qm" (oldbit), "+m" (ADDR)
105 : "Ir" (nr) : "memory");
106 return oldbit;
107} 99}
108 100
109/** 101/**
@@ -116,12 +108,7 @@ static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
116 */ 108 */
117static inline int sync_test_and_change_bit(long nr, volatile unsigned long *addr) 109static inline int sync_test_and_change_bit(long nr, volatile unsigned long *addr)
118{ 110{
119 unsigned char oldbit; 111 return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btc), *addr, c, "Ir", nr);
120
121 asm volatile("lock; btc %2,%1\n\tsetc %0"
122 : "=qm" (oldbit), "+m" (ADDR)
123 : "Ir" (nr) : "memory");
124 return oldbit;
125} 112}
126 113
127#define sync_test_bit(nr, addr) test_bit(nr, addr) 114#define sync_test_bit(nr, addr) test_bit(nr, addr)