path: root/include/asm-ia64/bitops.h
author    Zoltan Menyhart <Zoltan.Menyhart@bull.net>    2008-02-04 18:19:16 -0500
committer Tony Luck <tony.luck@intel.com>               2008-02-04 18:19:16 -0500
commit    5302ac5019367470e123cb91844a28d6941e6912
tree      33b7ede4df3f5c476cec45b997471ef7a1567f9c    /include/asm-ia64/bitops.h
parent    97075c4b3b7fdd6a083eea075c3a4a601f0d64d8
[IA64] Slim-down __clear_bit_unlock
- I removed the unnecessary barrier() from __clear_bit_unlock().
  ia64_st4_rel_nta() makes sure all the modifications are globally seen
  before the bit is seen to be off.
- I made __clear_bit() modeled after __set_bit() and __change_bit().
- I corrected some comments stating that a memory barrier is provided,
  yet in reality, it is the acquisition side of the memory barrier only.
- I corrected some comments, e.g. test_and_clear_bit() was speaking
  about "bit to set".

Signed-off-by: Zoltan Menyhart, <Zoltan.Menyhart@bull.net>
Acked-by: Nick Piggin <npiggin@suse.de>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Tony Luck <tony.luck@intel.com>
Diffstat (limited to 'include/asm-ia64/bitops.h')
-rw-r--r--    include/asm-ia64/bitops.h    50
1 file changed, 28 insertions, 22 deletions
diff --git a/include/asm-ia64/bitops.h b/include/asm-ia64/bitops.h
index a1b9719f5fbb..953d3df9dd22 100644
--- a/include/asm-ia64/bitops.h
+++ b/include/asm-ia64/bitops.h
@@ -122,38 +122,40 @@ clear_bit_unlock (int nr, volatile void *addr)
 }
 
 /**
- * __clear_bit_unlock - Non-atomically clear a bit with release
+ * __clear_bit_unlock - Non-atomically clears a bit in memory with release
+ * @nr: Bit to clear
+ * @addr: Address to start counting from
  *
- * This is like clear_bit_unlock, but the implementation uses a store
+ * Similarly to clear_bit_unlock, the implementation uses a store
  * with release semantics. See also __raw_spin_unlock().
  */
 static __inline__ void
-__clear_bit_unlock(int nr, volatile void *addr)
+__clear_bit_unlock(int nr, void *addr)
 {
-        __u32 mask, new;
-        volatile __u32 *m;
+        __u32 * const m = (__u32 *) addr + (nr >> 5);
+        __u32 const new = *m & ~(1 << (nr & 31));
 
-        m = (volatile __u32 *)addr + (nr >> 5);
-        mask = ~(1 << (nr & 31));
-        new = *m & mask;
-        barrier();
         ia64_st4_rel_nta(m, new);
 }
 
 /**
  * __clear_bit - Clears a bit in memory (non-atomic version)
+ * @nr: the bit to clear
+ * @addr: the address to start counting from
+ *
+ * Unlike clear_bit(), this function is non-atomic and may be reordered.
+ * If it's called on the same region of memory simultaneously, the effect
+ * may be that only one operation succeeds.
  */
 static __inline__ void
 __clear_bit (int nr, volatile void *addr)
 {
-        volatile __u32 *p = (__u32 *) addr + (nr >> 5);
-        __u32 m = 1 << (nr & 31);
-        *p &= ~m;
+        *((__u32 *) addr + (nr >> 5)) &= ~(1 << (nr & 31));
 }
 
 /**
  * change_bit - Toggle a bit in memory
- * @nr: Bit to clear
+ * @nr: Bit to toggle
  * @addr: Address to start counting from
  *
  * change_bit() is atomic and may not be reordered.
@@ -178,7 +180,7 @@ change_bit (int nr, volatile void *addr)
 
 /**
  * __change_bit - Toggle a bit in memory
- * @nr: the bit to set
+ * @nr: the bit to toggle
  * @addr: the address to start counting from
  *
  * Unlike change_bit(), this function is non-atomic and may be reordered.
@@ -197,7 +199,7 @@ __change_bit (int nr, volatile void *addr)
  * @addr: Address to count from
  *
  * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
+ * It also implies the acquisition side of the memory barrier.
  */
 static __inline__ int
 test_and_set_bit (int nr, volatile void *addr)
@@ -247,11 +249,11 @@ __test_and_set_bit (int nr, volatile void *addr)
 
 /**
  * test_and_clear_bit - Clear a bit and return its old value
- * @nr: Bit to set
+ * @nr: Bit to clear
  * @addr: Address to count from
  *
  * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
+ * It also implies the acquisition side of the memory barrier.
  */
 static __inline__ int
 test_and_clear_bit (int nr, volatile void *addr)
@@ -272,7 +274,7 @@ test_and_clear_bit (int nr, volatile void *addr)
 
 /**
  * __test_and_clear_bit - Clear a bit and return its old value
- * @nr: Bit to set
+ * @nr: Bit to clear
  * @addr: Address to count from
  *
  * This operation is non-atomic and can be reordered.
@@ -292,11 +294,11 @@ __test_and_clear_bit(int nr, volatile void * addr)
 
 /**
  * test_and_change_bit - Change a bit and return its old value
- * @nr: Bit to set
+ * @nr: Bit to change
  * @addr: Address to count from
  *
  * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
+ * It also implies the acquisition side of the memory barrier.
  */
 static __inline__ int
 test_and_change_bit (int nr, volatile void *addr)
@@ -315,8 +317,12 @@ test_and_change_bit (int nr, volatile void *addr)
         return (old & bit) != 0;
 }
 
-/*
- * WARNING: non atomic version.
+/**
+ * __test_and_change_bit - Change a bit and return its old value
+ * @nr: Bit to change
+ * @addr: Address to count from
+ *
+ * This operation is non-atomic and can be reordered.
  */
 static __inline__ int
 __test_and_change_bit (int nr, void *addr)