Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r--	arch/mips/include/asm/bitops.h	128
1 file changed, 39 insertions(+), 89 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 82ad35ce2b45..46ac73abd5ee 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -14,7 +14,6 @@
 #endif
 
 #include <linux/compiler.h>
-#include <linux/irqflags.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>		/* sigh ... */
@@ -44,6 +43,24 @@
 #define smp_mb__before_clear_bit()	smp_mb__before_llsc()
 #define smp_mb__after_clear_bit()	smp_llsc_mb()
 
+
+/*
+ * These are the "slower" versions of the functions and are in bitops.c.
+ * These functions call raw_local_irq_{save,restore}().
+ */
+void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
+int __mips_test_and_set_bit(unsigned long nr,
+			    volatile unsigned long *addr);
+int __mips_test_and_set_bit_lock(unsigned long nr,
+				 volatile unsigned long *addr);
+int __mips_test_and_clear_bit(unsigned long nr,
+			      volatile unsigned long *addr);
+int __mips_test_and_change_bit(unsigned long nr,
+			       volatile unsigned long *addr);
+
+
 /*
  * set_bit - Atomically set a bit in memory
  * @nr: the bit to set
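
The declarations above are the out-of-line replacements for the per-function fallbacks deleted in the hunks below. For orientation, here is a sketch of what the set_bit slow path in bitops.c plausibly looks like, reconstructed from the inline code this patch removes; the new file's exact contents are not part of this diff, so treat the body below as an assumption:

void __mips_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr;
	unsigned bit = nr & SZLONG_MASK;	/* bit index within one word */
	unsigned long mask;
	unsigned long flags;

	a += nr >> SZLONG_LOG;			/* step to the word holding the bit */
	mask = 1UL << bit;
	raw_local_irq_save(flags);		/* atomic only w.r.t. the local CPU */
	*a |= mask;
	raw_local_irq_restore(flags);
}

Moving these bodies out of line is what allows the first hunk to drop linux/irqflags.h: the header itself no longer calls raw_local_irq_{save,restore}().
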
@@ -57,7 +74,7 @@
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -92,17 +109,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (1UL << bit));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_set_bit(nr, addr);
 }
 
 /*
@@ -118,7 +126,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -153,17 +161,8 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (~(1UL << bit)));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a &= ~mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_clear_bit(nr, addr);
 }
 
 /*
@@ -191,7 +190,7 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -220,17 +219,8 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (1UL << bit));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a ^= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_change_bit(nr, addr);
 }
 
 /*
@@ -244,7 +234,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -281,18 +271,8 @@ static inline int test_and_set_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_set_bit(nr, addr);
 
 	smp_llsc_mb();
 
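
The test-and-modify slow paths additionally return the bit's previous value, sampled inside the irq-disabled window. A sketch along the same lines, again reconstructed from the deleted fallback; the `!= 0` normalization is an assumption added here so that a high bit in a 64-bit word is not truncated away by the int return type (the inline fallback kept the raw mask in an unsigned long):

int __mips_test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	unsigned long flags;
	int res;

	raw_local_irq_save(flags);
	res = (mask & *a) != 0;		/* sample the old value ... */
	*a |= mask;			/* ... then set the bit */
	raw_local_irq_restore(flags);
	return res;
}
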
@@ -310,7 +290,7 @@ static inline int test_and_set_bit(unsigned long nr,
 static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -345,18 +325,8 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_set_bit_lock(nr, addr);
 
 	smp_llsc_mb();
 
@@ -373,7 +343,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -428,18 +398,8 @@ static inline int test_and_clear_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a &= ~mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_clear_bit(nr, addr);
 
 	smp_llsc_mb();
 
@@ -457,7 +417,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -494,18 +454,8 @@ static inline int test_and_change_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a ^= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_change_bit(nr, addr);
 
 	smp_llsc_mb();
 
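
Callers are unaffected throughout: the bitops API and its return semantics do not change, and kernels built with kernel_uses_llsc still take the inline LL/SC fast paths; only configurations without LL/SC now make an out-of-line call for the same irq save/restore sequence they previously executed inline. A minimal usage sketch, shown only to illustrate the unchanged contract:

	static unsigned long state;

	set_bit(0, &state);			/* atomically set bit 0 */
	if (test_and_clear_bit(0, &state))	/* returns the old value */
		pr_info("bit 0 was set\n");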