diff options
Diffstat (limited to 'include/asm-mips/bitops.h')
-rw-r--r-- | include/asm-mips/bitops.h | 106 |
1 files changed, 106 insertions, 0 deletions
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h index 899357a72ac4..ec75ce4cdb8c 100644 --- a/include/asm-mips/bitops.h +++ b/include/asm-mips/bitops.h | |||
@@ -9,6 +9,10 @@ | |||
9 | #ifndef _ASM_BITOPS_H | 9 | #ifndef _ASM_BITOPS_H |
10 | #define _ASM_BITOPS_H | 10 | #define _ASM_BITOPS_H |
11 | 11 | ||
12 | #ifndef _LINUX_BITOPS_H | ||
13 | #error only <linux/bitops.h> can be included directly | ||
14 | #endif | ||
15 | |||
12 | #include <linux/compiler.h> | 16 | #include <linux/compiler.h> |
13 | #include <linux/irqflags.h> | 17 | #include <linux/irqflags.h> |
14 | #include <linux/types.h> | 18 | #include <linux/types.h> |
@@ -172,6 +176,20 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) | |||
172 | } | 176 | } |
173 | 177 | ||
174 | /* | 178 | /* |
179 | * clear_bit_unlock - Clears a bit in memory | ||
180 | * @nr: Bit to clear | ||
181 | * @addr: Address to start counting from | ||
182 | * | ||
183 | * clear_bit() is atomic and implies release semantics before the memory | ||
184 | * operation. It can be used for an unlock. | ||
185 | */ | ||
186 | static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr) | ||
187 | { | ||
188 | smp_mb__before_clear_bit(); | ||
189 | clear_bit(nr, addr); | ||
190 | } | ||
191 | |||
192 | /* | ||
175 | * change_bit - Toggle a bit in memory | 193 | * change_bit - Toggle a bit in memory |
176 | * @nr: Bit to change | 194 | * @nr: Bit to change |
177 | * @addr: Address to start counting from | 195 | * @addr: Address to start counting from |
@@ -240,6 +258,8 @@ static inline int test_and_set_bit(unsigned long nr, | |||
240 | unsigned short bit = nr & SZLONG_MASK; | 258 | unsigned short bit = nr & SZLONG_MASK; |
241 | unsigned long res; | 259 | unsigned long res; |
242 | 260 | ||
261 | smp_llsc_mb(); | ||
262 | |||
243 | if (cpu_has_llsc && R10000_LLSC_WAR) { | 263 | if (cpu_has_llsc && R10000_LLSC_WAR) { |
244 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 264 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
245 | unsigned long temp; | 265 | unsigned long temp; |
@@ -295,6 +315,73 @@ static inline int test_and_set_bit(unsigned long nr, | |||
295 | } | 315 | } |
296 | 316 | ||
297 | /* | 317 | /* |
318 | * test_and_set_bit_lock - Set a bit and return its old value | ||
319 | * @nr: Bit to set | ||
320 | * @addr: Address to count from | ||
321 | * | ||
322 | * This operation is atomic and implies acquire ordering semantics | ||
323 | * after the memory operation. | ||
324 | */ | ||
325 | static inline int test_and_set_bit_lock(unsigned long nr, | ||
326 | volatile unsigned long *addr) | ||
327 | { | ||
328 | unsigned short bit = nr & SZLONG_MASK; | ||
329 | unsigned long res; | ||
330 | |||
331 | if (cpu_has_llsc && R10000_LLSC_WAR) { | ||
332 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | ||
333 | unsigned long temp; | ||
334 | |||
335 | __asm__ __volatile__( | ||
336 | " .set mips3 \n" | ||
337 | "1: " __LL "%0, %1 # test_and_set_bit_lock \n" | ||
338 | " or %2, %0, %3 \n" | ||
339 | " " __SC "%2, %1 \n" | ||
340 | " beqzl %2, 1b \n" | ||
341 | " and %2, %0, %3 \n" | ||
342 | " .set mips0 \n" | ||
343 | : "=&r" (temp), "=m" (*m), "=&r" (res) | ||
344 | : "r" (1UL << bit), "m" (*m) | ||
345 | : "memory"); | ||
346 | } else if (cpu_has_llsc) { | ||
347 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | ||
348 | unsigned long temp; | ||
349 | |||
350 | __asm__ __volatile__( | ||
351 | " .set push \n" | ||
352 | " .set noreorder \n" | ||
353 | " .set mips3 \n" | ||
354 | "1: " __LL "%0, %1 # test_and_set_bit_lock \n" | ||
355 | " or %2, %0, %3 \n" | ||
356 | " " __SC "%2, %1 \n" | ||
357 | " beqz %2, 2f \n" | ||
358 | " and %2, %0, %3 \n" | ||
359 | " .subsection 2 \n" | ||
360 | "2: b 1b \n" | ||
361 | " nop \n" | ||
362 | " .previous \n" | ||
363 | " .set pop \n" | ||
364 | : "=&r" (temp), "=m" (*m), "=&r" (res) | ||
365 | : "r" (1UL << bit), "m" (*m) | ||
366 | : "memory"); | ||
367 | } else { | ||
368 | volatile unsigned long *a = addr; | ||
369 | unsigned long mask; | ||
370 | unsigned long flags; | ||
371 | |||
372 | a += nr >> SZLONG_LOG; | ||
373 | mask = 1UL << bit; | ||
374 | raw_local_irq_save(flags); | ||
375 | res = (mask & *a); | ||
376 | *a |= mask; | ||
377 | raw_local_irq_restore(flags); | ||
378 | } | ||
379 | |||
380 | smp_llsc_mb(); | ||
381 | |||
382 | return res != 0; | ||
383 | } | ||
384 | /* | ||
298 | * test_and_clear_bit - Clear a bit and return its old value | 385 | * test_and_clear_bit - Clear a bit and return its old value |
299 | * @nr: Bit to clear | 386 | * @nr: Bit to clear |
300 | * @addr: Address to count from | 387 | * @addr: Address to count from |
@@ -308,6 +395,8 @@ static inline int test_and_clear_bit(unsigned long nr, | |||
308 | unsigned short bit = nr & SZLONG_MASK; | 395 | unsigned short bit = nr & SZLONG_MASK; |
309 | unsigned long res; | 396 | unsigned long res; |
310 | 397 | ||
398 | smp_llsc_mb(); | ||
399 | |||
311 | if (cpu_has_llsc && R10000_LLSC_WAR) { | 400 | if (cpu_has_llsc && R10000_LLSC_WAR) { |
312 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 401 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
313 | unsigned long temp; | 402 | unsigned long temp; |
@@ -396,6 +485,8 @@ static inline int test_and_change_bit(unsigned long nr, | |||
396 | unsigned short bit = nr & SZLONG_MASK; | 485 | unsigned short bit = nr & SZLONG_MASK; |
397 | unsigned long res; | 486 | unsigned long res; |
398 | 487 | ||
488 | smp_llsc_mb(); | ||
489 | |||
399 | if (cpu_has_llsc && R10000_LLSC_WAR) { | 490 | if (cpu_has_llsc && R10000_LLSC_WAR) { |
400 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 491 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
401 | unsigned long temp; | 492 | unsigned long temp; |
@@ -453,6 +544,21 @@ static inline int test_and_change_bit(unsigned long nr, | |||
453 | #include <asm-generic/bitops/non-atomic.h> | 544 | #include <asm-generic/bitops/non-atomic.h> |
454 | 545 | ||
455 | /* | 546 | /* |
547 | * __clear_bit_unlock - Clears a bit in memory | ||
548 | * @nr: Bit to clear | ||
549 | * @addr: Address to start counting from | ||
550 | * | ||
551 | * __clear_bit() is non-atomic and implies release semantics before the memory | ||
552 | * operation. It can be used for an unlock if no other CPUs can concurrently | ||
553 | * modify other bits in the word. | ||
554 | */ | ||
555 | static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr) | ||
556 | { | ||
557 | smp_mb(); | ||
558 | __clear_bit(nr, addr); | ||
559 | } | ||
560 | |||
561 | /* | ||
456 | * Return the bit position (0..63) of the most significant 1 bit in a word | 562 | * Return the bit position (0..63) of the most significant 1 bit in a word |
457 | * Returns -1 if no 1 bit exists | 563 | * Returns -1 if no 1 bit exists |
458 | */ | 564 | */ |