Diffstat (limited to 'include/asm-x86/sync_bitops.h')
-rw-r--r--	include/asm-x86/sync_bitops.h	77
1 file changed, 29 insertions(+), 48 deletions(-)
diff --git a/include/asm-x86/sync_bitops.h b/include/asm-x86/sync_bitops.h
index 6b775c905666..b47a1d0b8a83 100644
--- a/include/asm-x86/sync_bitops.h
+++ b/include/asm-x86/sync_bitops.h
@@ -13,7 +13,7 @@
  * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
  */
 
-#define ADDR (*(volatile long *) addr)
+#define ADDR (*(volatile long *)addr)
 
 /**
  * sync_set_bit - Atomically set a bit in memory
@@ -26,12 +26,12 @@
  * Note that @nr may be almost arbitrarily large; this function is not
  * restricted to acting on a single-word quantity.
  */
-static inline void sync_set_bit(int nr, volatile unsigned long * addr)
+static inline void sync_set_bit(int nr, volatile unsigned long *addr)
 {
-	__asm__ __volatile__("lock; btsl %1,%0"
-			     :"+m" (ADDR)
-			     :"Ir" (nr)
-			     : "memory");
+	asm volatile("lock; btsl %1,%0"
+		     : "+m" (ADDR)
+		     : "Ir" (nr)
+		     : "memory");
 }
 
 /**
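
Background on the instruction: lock; btsl is an atomic read-modify-write that sets one bit in memory; this hunk only respells the asm statement, so the generated code is unchanged. A rough portable sketch of the same operation, assuming GCC and using the hypothetical name sketch_set_bit (not part of this header), built on the __sync_fetch_and_or builtin:

	/* Illustrative stand-in for sync_set_bit(): atomically OR one bit
	 * into a bitmap.  __sync_fetch_and_or() emits a locked
	 * read-modify-write, much as "lock; btsl" does.  The indexing
	 * mirrors btsl's register-operand addressing: 32-bit word
	 * nr >> 5, bit nr & 31 within it.
	 */
	static inline void sketch_set_bit(int nr, volatile unsigned long *addr)
	{
		volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);

		__sync_fetch_and_or(word, 1U << (nr & 31));
	}

The same pattern with fetch-and-AND of ~mask and fetch-and-XOR of mask corresponds to sync_clear_bit and sync_change_bit in the next two hunks.
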
@@ -44,12 +44,12 @@ static inline void sync_set_bit(int nr, volatile unsigned long * addr)
  * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
  * in order to ensure changes are visible on other processors.
  */
-static inline void sync_clear_bit(int nr, volatile unsigned long * addr)
+static inline void sync_clear_bit(int nr, volatile unsigned long *addr)
 {
-	__asm__ __volatile__("lock; btrl %1,%0"
-			     :"+m" (ADDR)
-			     :"Ir" (nr)
-			     : "memory");
+	asm volatile("lock; btrl %1,%0"
+		     : "+m" (ADDR)
+		     : "Ir" (nr)
+		     : "memory");
 }
 
 /**
@@ -61,12 +61,12 @@ static inline void sync_clear_bit(int nr, volatile unsigned long * addr)
  * Note that @nr may be almost arbitrarily large; this function is not
  * restricted to acting on a single-word quantity.
  */
-static inline void sync_change_bit(int nr, volatile unsigned long * addr)
+static inline void sync_change_bit(int nr, volatile unsigned long *addr)
 {
-	__asm__ __volatile__("lock; btcl %1,%0"
-			     :"+m" (ADDR)
-			     :"Ir" (nr)
-			     : "memory");
+	asm volatile("lock; btcl %1,%0"
+		     : "+m" (ADDR)
+		     : "Ir" (nr)
+		     : "memory");
 }
 
 /**
@@ -77,13 +77,13 @@ static inline void sync_change_bit(int nr, volatile unsigned long * addr)
  * This operation is atomic and cannot be reordered.
  * It also implies a memory barrier.
  */
-static inline int sync_test_and_set_bit(int nr, volatile unsigned long * addr)
+static inline int sync_test_and_set_bit(int nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
-	__asm__ __volatile__("lock; btsl %2,%1\n\tsbbl %0,%0"
-			     :"=r" (oldbit),"+m" (ADDR)
-			     :"Ir" (nr) : "memory");
+	asm volatile("lock; btsl %2,%1\n\tsbbl %0,%0"
+		     : "=r" (oldbit), "+m" (ADDR)
+		     : "Ir" (nr) : "memory");
 	return oldbit;
 }
 
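
Background on the sbbl idiom used by the test_and_* helpers: bts/btr/btc copy the old value of the addressed bit into the carry flag, and sbbl %0,%0 computes oldbit - oldbit - CF, i.e. 0 when the bit was clear and -1 (all ones) when it was set, so the result doubles as a boolean. A minimal user-space demonstration, assuming GCC on x86; sketch_test_and_set_bit is a hypothetical name, not kernel code:

	#include <stdio.h>

	static inline int sketch_test_and_set_bit(int nr, volatile unsigned long *addr)
	{
		int oldbit;

		/* Same instruction pair as the kernel helper, with the ADDR
		 * macro expanded by hand: old bit -> CF, then CF -> 0 or -1. */
		asm volatile("lock; btsl %2,%1\n\tsbbl %0,%0"
			     : "=r" (oldbit), "+m" (*(volatile long *)addr)
			     : "Ir" (nr) : "memory");
		return oldbit;
	}

	int main(void)
	{
		unsigned long map = 0;

		printf("%d\n", sketch_test_and_set_bit(5, &map));	/* 0: bit was clear */
		printf("%d\n", sketch_test_and_set_bit(5, &map));	/* -1: bit was already set */
		return 0;
	}
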
@@ -95,13 +95,13 @@ static inline int sync_test_and_set_bit(int nr, volatile unsigned long * addr)
  * This operation is atomic and cannot be reordered.
  * It also implies a memory barrier.
  */
-static inline int sync_test_and_clear_bit(int nr, volatile unsigned long * addr)
+static inline int sync_test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
-	__asm__ __volatile__("lock; btrl %2,%1\n\tsbbl %0,%0"
-			     :"=r" (oldbit),"+m" (ADDR)
-			     :"Ir" (nr) : "memory");
+	asm volatile("lock; btrl %2,%1\n\tsbbl %0,%0"
+		     : "=r" (oldbit), "+m" (ADDR)
+		     : "Ir" (nr) : "memory");
 	return oldbit;
 }
 
@@ -113,36 +113,17 @@ static inline int sync_test_and_clear_bit(int nr, volatile unsigned long * addr)
  * This operation is atomic and cannot be reordered.
  * It also implies a memory barrier.
  */
-static inline int sync_test_and_change_bit(int nr, volatile unsigned long* addr)
+static inline int sync_test_and_change_bit(int nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
-	__asm__ __volatile__("lock; btcl %2,%1\n\tsbbl %0,%0"
-			     :"=r" (oldbit),"+m" (ADDR)
-			     :"Ir" (nr) : "memory");
+	asm volatile("lock; btcl %2,%1\n\tsbbl %0,%0"
+		     : "=r" (oldbit), "+m" (ADDR)
+		     : "Ir" (nr) : "memory");
 	return oldbit;
 }
 
-static __always_inline int sync_constant_test_bit(int nr, const volatile unsigned long *addr)
-{
-	return ((1UL << (nr & 31)) &
-		(((const volatile unsigned int *)addr)[nr >> 5])) != 0;
-}
-
-static inline int sync_var_test_bit(int nr, const volatile unsigned long * addr)
-{
-	int oldbit;
-
-	__asm__ __volatile__("btl %2,%1\n\tsbbl %0,%0"
-			     :"=r" (oldbit)
-			     :"m" (ADDR),"Ir" (nr));
-	return oldbit;
-}
-
-#define sync_test_bit(nr,addr) \
-	(__builtin_constant_p(nr) ? \
-	 sync_constant_test_bit((nr),(addr)) : \
-	 sync_var_test_bit((nr),(addr)))
+#define sync_test_bit(nr, addr) test_bit(nr, addr)
 
 #undef ADDR
 
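
The last hunk drops the open-coded constant/variable pair and defines sync_test_bit as plain test_bit. Testing a bit is a pure read, and an aligned load is already atomic on x86, so no lock prefix is required; that is the usual rationale for such a change, though the patch itself does not state it. Roughly what the test evaluates, as a sketch under the same 32-bit word indexing, with the hypothetical name sketch_test_bit:

	/* Roughly what sync_test_bit()/test_bit() computes: load the
	 * 32-bit word holding bit nr and mask that bit out.  A plain
	 * read needs no lock prefix. */
	static inline int sketch_test_bit(int nr, const volatile unsigned long *addr)
	{
		return (((const volatile unsigned int *)addr)[nr >> 5] >> (nr & 31)) & 1;
	}
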