Diffstat (limited to 'include/asm-mips')
 include/asm-mips/bitops.h | 102 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 102 insertions(+), 0 deletions(-)
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 899357a72ac4..77ed0c79830b 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -172,6 +172,20 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 }
 
 /*
+ * clear_bit_unlock - Clears a bit in memory
+ * @nr: Bit to clear
+ * @addr: Address to start counting from
+ *
+ * clear_bit() is atomic and implies release semantics before the memory
+ * operation. It can be used for an unlock.
+ */
+static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
+{
+	smp_mb__before_clear_bit();
+	clear_bit(nr, addr);
+}
+
+/*
  * change_bit - Toggle a bit in memory
  * @nr: Bit to change
  * @addr: Address to start counting from
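Usage sketch, not part of the patch: the hunk above adds the release side of a bit lock. The lock word and bit number below are hypothetical; the matching acquire-side primitive, test_and_set_bit_lock(), is added further down in this same patch.

/* Hypothetical example: releasing a one-bit lock held in bit 0. */
static void my_unlock(unsigned long *word)
{
	/* smp_mb__before_clear_bit() inside clear_bit_unlock() orders
	 * every store of the critical section before the bit clear. */
	clear_bit_unlock(0, word);
}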
@@ -240,6 +254,8 @@ static inline int test_and_set_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
+	smp_llsc_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
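Context for the new leading barrier: smp_llsc_mb() before the ll/sc sequence upgrades test_and_set_bit() from acquire-only to fully ordered, while the _lock variant below keeps only the trailing barrier. For reference, smp_llsc_mb() in this tree's asm-mips/barrier.h is approximately the following (a sketch from the same kernel series, shown here as an assumption, not quoted from this patch):

/* Sketch of smp_llsc_mb(): a real SYNC only on cores that reorder
 * beyond ll/sc; otherwise it degenerates to a compiler barrier. */
#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
#define __WEAK_LLSC_MB		"	sync	\n"
#else
#define __WEAK_LLSC_MB		"		\n"
#endif

#define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : : "memory")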
@@ -295,6 +311,73 @@ static inline int test_and_set_bit(unsigned long nr,
 }
 
 /*
+ * test_and_set_bit_lock - Set a bit and return its old value
+ * @nr: Bit to set
+ * @addr: Address to count from
+ *
+ * This operation is atomic and implies acquire ordering semantics
+ * after the memory operation.
+ */
+static inline int test_and_set_bit_lock(unsigned long nr,
+	volatile unsigned long *addr)
+{
+	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
+
+	if (cpu_has_llsc && R10000_LLSC_WAR) {
+		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
+		"	or	%2, %0, %3				\n"
+		"	" __SC "%2, %1					\n"
+		"	beqzl	%2, 1b					\n"
+		"	and	%2, %0, %3				\n"
+		"	.set	mips0					\n"
+		: "=&r" (temp), "=m" (*m), "=&r" (res)
+		: "r" (1UL << bit), "m" (*m)
+		: "memory");
+	} else if (cpu_has_llsc) {
+		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
+		unsigned long temp;
+
+		__asm__ __volatile__(
+		"	.set	push					\n"
+		"	.set	noreorder				\n"
+		"	.set	mips3					\n"
+		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
+		"	or	%2, %0, %3				\n"
+		"	" __SC "%2, %1					\n"
+		"	beqz	%2, 2f					\n"
+		"	 and	%2, %0, %3				\n"
+		"	.subsection 2					\n"
+		"2:	b	1b					\n"
+		"	 nop						\n"
+		"	.previous					\n"
+		"	.set	pop					\n"
+		: "=&r" (temp), "=m" (*m), "=&r" (res)
+		: "r" (1UL << bit), "m" (*m)
+		: "memory");
+	} else {
+		volatile unsigned long *a = addr;
+		unsigned long mask;
+		unsigned long flags;
+
+		a += nr >> SZLONG_LOG;
+		mask = 1UL << bit;
+		raw_local_irq_save(flags);
+		res = (mask & *a);
+		*a |= mask;
+		raw_local_irq_restore(flags);
+	}
+
+	smp_llsc_mb();
+
+	return res != 0;
+}
+/*
  * test_and_clear_bit - Clear a bit and return its old value
  * @nr: Bit to clear
  * @addr: Address to count from
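Usage sketch, not part of the patch: the acquire side pairing with the clear_bit_unlock() sketch earlier. The function name and bit number are hypothetical; cpu_relax() is the usual busy-wait hint.

/* Hypothetical example: taking a one-bit lock held in bit 0. */
static void my_lock(unsigned long *word)
{
	/* A return of 0 means the bit was previously clear and we now
	 * own the lock; the trailing smp_llsc_mb() provides the acquire
	 * ordering documented above, so reads inside the critical
	 * section cannot be observed before the bit is set. */
	while (test_and_set_bit_lock(0, word))
		cpu_relax();
}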
@@ -308,6 +391,8 @@ static inline int test_and_clear_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
+	smp_llsc_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
@@ -396,6 +481,8 @@ static inline int test_and_change_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
+	smp_llsc_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
@@ -453,6 +540,21 @@ static inline int test_and_change_bit(unsigned long nr,
 #include <asm-generic/bitops/non-atomic.h>
 
 /*
+ * __clear_bit_unlock - Clears a bit in memory
+ * @nr: Bit to clear
+ * @addr: Address to start counting from
+ *
+ * __clear_bit() is non-atomic and implies release semantics before the memory
+ * operation. It can be used for an unlock if no other CPUs can concurrently
+ * modify other bits in the word.
+ */
+static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
+{
+	smp_mb();
+	__clear_bit(nr, addr);
+}
+
+/*
  * Return the bit position (0..63) of the most significant 1 bit in a word
  * Returns -1 if no 1 bit exists
  */
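Usage sketch, not part of the patch: the non-atomic __clear_bit_unlock() is safe exactly when every writer of the lock word holds the lock, as in this hypothetical structure (names and bit layout assumed for illustration).

/* Hypothetical example: all writers of obj->state hold the bit-0 lock,
 * so the plain read-modify-write in __clear_bit() cannot lose a
 * concurrent update to another bit of the same word. */
struct my_obj {
	unsigned long state;	/* bit 0: lock; bits 1..N: owner-only flags */
};

static void my_obj_unlock(struct my_obj *obj)
{
	__set_bit(1, &obj->state);	/* owner-only flag update, no atomics needed */
	__clear_bit_unlock(0, &obj->state);	/* smp_mb(), then plain clear */
}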