Diffstat (limited to 'arch/x86/include/asm/sync_bitops.h')
 arch/x86/include/asm/sync_bitops.h | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/arch/x86/include/asm/sync_bitops.h b/arch/x86/include/asm/sync_bitops.h
index 9d09b4073b60..05af3b31d522 100644
--- a/arch/x86/include/asm/sync_bitops.h
+++ b/arch/x86/include/asm/sync_bitops.h
@@ -26,9 +26,9 @@
  * Note that @nr may be almost arbitrarily large; this function is not
  * restricted to acting on a single-word quantity.
  */
-static inline void sync_set_bit(int nr, volatile unsigned long *addr)
+static inline void sync_set_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("lock; btsl %1,%0"
+	asm volatile("lock; bts %1,%0"
 		     : "+m" (ADDR)
 		     : "Ir" (nr)
 		     : "memory");
@@ -44,9 +44,9 @@ static inline void sync_set_bit(int nr, volatile unsigned long *addr)
  * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
  * in order to ensure changes are visible on other processors.
  */
-static inline void sync_clear_bit(int nr, volatile unsigned long *addr)
+static inline void sync_clear_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("lock; btrl %1,%0"
+	asm volatile("lock; btr %1,%0"
 		     : "+m" (ADDR)
 		     : "Ir" (nr)
 		     : "memory");
@@ -61,9 +61,9 @@ static inline void sync_clear_bit(int nr, volatile unsigned long *addr)
  * Note that @nr may be almost arbitrarily large; this function is not
  * restricted to acting on a single-word quantity.
  */
-static inline void sync_change_bit(int nr, volatile unsigned long *addr)
+static inline void sync_change_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("lock; btcl %1,%0"
+	asm volatile("lock; btc %1,%0"
 		     : "+m" (ADDR)
 		     : "Ir" (nr)
 		     : "memory");
@@ -77,11 +77,11 @@ static inline void sync_change_bit(int nr, volatile unsigned long *addr)
  * This operation is atomic and cannot be reordered.
  * It also implies a memory barrier.
  */
-static inline int sync_test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
-	asm volatile("lock; btsl %2,%1\n\tsbbl %0,%0"
+	asm volatile("lock; bts %2,%1\n\tsbbl %0,%0"
 		     : "=r" (oldbit), "+m" (ADDR)
 		     : "Ir" (nr) : "memory");
 	return oldbit;
@@ -95,11 +95,11 @@ static inline int sync_test_and_set_bit(int nr, volatile unsigned long *addr)
  * This operation is atomic and cannot be reordered.
  * It also implies a memory barrier.
  */
-static inline int sync_test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
-	asm volatile("lock; btrl %2,%1\n\tsbbl %0,%0"
+	asm volatile("lock; btr %2,%1\n\tsbbl %0,%0"
 		     : "=r" (oldbit), "+m" (ADDR)
 		     : "Ir" (nr) : "memory");
 	return oldbit;
@@ -113,11 +113,11 @@ static inline int sync_test_and_clear_bit(int nr, volatile unsigned long *addr)
  * This operation is atomic and cannot be reordered.
  * It also implies a memory barrier.
  */
-static inline int sync_test_and_change_bit(int nr, volatile unsigned long *addr)
+static inline int sync_test_and_change_bit(long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
-	asm volatile("lock; btcl %2,%1\n\tsbbl %0,%0"
+	asm volatile("lock; btc %2,%1\n\tsbbl %0,%0"
 		     : "=r" (oldbit), "+m" (ADDR)
 		     : "Ir" (nr) : "memory");
 	return oldbit;
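
Note on the pattern above: every hunk makes the same two changes. Widening
"nr" from int to long lets the bit number cover the full native word width,
and dropping the explicit "l" suffix from bts/btr/btc lets the assembler size
the instruction from the register holding "nr", so on 64-bit kernels bit
offsets beyond the 31-bit range of the old int/btsl pair become reachable.
The sketch below is not part of the patch; it mirrors the post-patch
sync_test_and_set_bit() under stated assumptions: the demo_ function name and
main() harness are hypothetical, and the plain *addr memory operand stands in
for the kernel's ADDR macro. It compiles with gcc on x86-64:

#include <stdio.h>

/* Hypothetical stand-in for the post-patch sync_test_and_set_bit(). */
static inline int demo_sync_test_and_set_bit(long nr, volatile unsigned long *addr)
{
	int oldbit;

	/*
	 * bts copies the old bit value into the carry flag before setting
	 * the bit; "sbbl %0,%0" then computes oldbit - oldbit - CF, i.e.
	 * 0 when the bit was clear and -1 when it was already set. The
	 * "memory" clobber covers the fact that nr may index past the
	 * single word named by the "+m" operand.
	 */
	asm volatile("lock; bts %2,%1\n\tsbbl %0,%0"
		     : "=r" (oldbit), "+m" (*addr)
		     : "Ir" (nr) : "memory");
	return oldbit;
}

int main(void)
{
	unsigned long bitmap[4] = { 0 };

	/* Bit 100 lands in word 100 / 64 = 1, at position 100 % 64 = 36. */
	printf("first:  %d\n", demo_sync_test_and_set_bit(100, bitmap)); /* 0  */
	printf("second: %d\n", demo_sync_test_and_set_bit(100, bitmap)); /* -1 */
	printf("word 1: %#lx\n", bitmap[1]); /* 0x1000000000 */
	return 0;
}

Two details worth noting: the asm yields -1 rather than 1 for a previously
set bit, so callers must treat the result as a boolean, not compare it
against 1; and it is the long nr, sized into the bt* instruction via the
register operand, that lets the bit-100 call address past the first word of
the bitmap.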