Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r--	arch/mips/include/asm/bitops.h	270
1 file changed, 121 insertions(+), 149 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index b0ce7ca2851f..50b4ef288c53 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -73,30 +73,26 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
73 : "ir" (1UL << bit), "m" (*m)); 73 : "ir" (1UL << bit), "m" (*m));
74#ifdef CONFIG_CPU_MIPSR2 74#ifdef CONFIG_CPU_MIPSR2
75 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { 75 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
76 __asm__ __volatile__( 76 do {
77 "1: " __LL "%0, %1 # set_bit \n" 77 __asm__ __volatile__(
78 " " __INS "%0, %4, %2, 1 \n" 78 " " __LL "%0, %1 # set_bit \n"
79 " " __SC "%0, %1 \n" 79 " " __INS "%0, %3, %2, 1 \n"
80 " beqz %0, 2f \n" 80 " " __SC "%0, %1 \n"
81 " .subsection 2 \n" 81 : "=&r" (temp), "+m" (*m)
82 "2: b 1b \n" 82 : "ir" (bit), "r" (~0));
83 " .previous \n" 83 } while (unlikely(!temp));
84 : "=&r" (temp), "=m" (*m)
85 : "ir" (bit), "m" (*m), "r" (~0));
86#endif /* CONFIG_CPU_MIPSR2 */ 84#endif /* CONFIG_CPU_MIPSR2 */
87 } else if (kernel_uses_llsc) { 85 } else if (kernel_uses_llsc) {
88 __asm__ __volatile__( 86 do {
89 " .set mips3 \n" 87 __asm__ __volatile__(
90 "1: " __LL "%0, %1 # set_bit \n" 88 " .set mips3 \n"
91 " or %0, %2 \n" 89 " " __LL "%0, %1 # set_bit \n"
92 " " __SC "%0, %1 \n" 90 " or %0, %2 \n"
93 " beqz %0, 2f \n" 91 " " __SC "%0, %1 \n"
94 " .subsection 2 \n" 92 " .set mips0 \n"
95 "2: b 1b \n" 93 : "=&r" (temp), "+m" (*m)
96 " .previous \n" 94 : "ir" (1UL << bit));
97 " .set mips0 \n" 95 } while (unlikely(!temp));
98 : "=&r" (temp), "=m" (*m)
99 : "ir" (1UL << bit), "m" (*m));
100 } else { 96 } else {
101 volatile unsigned long *a = addr; 97 volatile unsigned long *a = addr;
102 unsigned long mask; 98 unsigned long mask;
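
The hunk above shows the core of the change: the in-asm retry branch (a beqz to a ".subsection 2" trampoline that jumps back to the LL) is replaced by a C-level do { ... } while (unlikely(!temp)) loop around a straight-line LL/INS-or-OR/SC sequence, so the compiler itself sees and lays out the retry path. A minimal portable sketch of the same loop shape, using a weak compare-exchange as a stand-in for LL/SC (set_bit_generic and BITS_PER_LONG are illustrative names, not part of the patch):

	#include <limits.h>

	#define BITS_PER_LONG	(CHAR_BIT * sizeof(unsigned long))

	/* A weak compare-exchange may fail spuriously, exactly as SC may,
	 * so the retry loop lives in C where the compiler can see it. */
	static void set_bit_generic(unsigned long nr, unsigned long *addr)
	{
		unsigned long *m = addr + nr / BITS_PER_LONG;
		unsigned long old = __atomic_load_n(m, __ATOMIC_RELAXED);

		/* Mirrors "do { LL; or; SC; } while (unlikely(!temp));". */
		while (!__atomic_compare_exchange_n(m, &old,
						    old | (1UL << (nr % BITS_PER_LONG)),
						    1 /* weak, like SC */,
						    __ATOMIC_RELAXED, __ATOMIC_RELAXED))
			;	/* "old" was refreshed by the failed exchange */
	}
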
@@ -134,34 +130,30 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
134 " " __SC "%0, %1 \n" 130 " " __SC "%0, %1 \n"
135 " beqzl %0, 1b \n" 131 " beqzl %0, 1b \n"
136 " .set mips0 \n" 132 " .set mips0 \n"
137 : "=&r" (temp), "=m" (*m) 133 : "=&r" (temp), "+m" (*m)
138 : "ir" (~(1UL << bit)), "m" (*m)); 134 : "ir" (~(1UL << bit)));
139#ifdef CONFIG_CPU_MIPSR2 135#ifdef CONFIG_CPU_MIPSR2
140 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { 136 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
141 __asm__ __volatile__( 137 do {
142 "1: " __LL "%0, %1 # clear_bit \n" 138 __asm__ __volatile__(
143 " " __INS "%0, $0, %2, 1 \n" 139 " " __LL "%0, %1 # clear_bit \n"
144 " " __SC "%0, %1 \n" 140 " " __INS "%0, $0, %2, 1 \n"
145 " beqz %0, 2f \n" 141 " " __SC "%0, %1 \n"
146 " .subsection 2 \n" 142 : "=&r" (temp), "+m" (*m)
147 "2: b 1b \n" 143 : "ir" (bit));
148 " .previous \n" 144 } while (unlikely(!temp));
149 : "=&r" (temp), "=m" (*m)
150 : "ir" (bit), "m" (*m));
151#endif /* CONFIG_CPU_MIPSR2 */ 145#endif /* CONFIG_CPU_MIPSR2 */
152 } else if (kernel_uses_llsc) { 146 } else if (kernel_uses_llsc) {
153 __asm__ __volatile__( 147 do {
154 " .set mips3 \n" 148 __asm__ __volatile__(
155 "1: " __LL "%0, %1 # clear_bit \n" 149 " .set mips3 \n"
156 " and %0, %2 \n" 150 " " __LL "%0, %1 # clear_bit \n"
157 " " __SC "%0, %1 \n" 151 " and %0, %2 \n"
158 " beqz %0, 2f \n" 152 " " __SC "%0, %1 \n"
159 " .subsection 2 \n" 153 " .set mips0 \n"
160 "2: b 1b \n" 154 : "=&r" (temp), "+m" (*m)
161 " .previous \n" 155 : "ir" (~(1UL << bit)));
162 " .set mips0 \n" 156 } while (unlikely(!temp));
163 : "=&r" (temp), "=m" (*m)
164 : "ir" (~(1UL << bit)), "m" (*m));
165 } else { 157 } else {
166 volatile unsigned long *a = addr; 158 volatile unsigned long *a = addr;
167 unsigned long mask; 159 unsigned long mask;
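
Threaded through every hunk is a second, independent fix: the asm used to declare "=m" (*m) as a write-only output and pass a duplicate "m" (*m) input to say the same word was also read; the new code collapses that into a single "+m" (*m) read-write operand. Schematically (x86-64 host assumed purely for illustration; inc_mem is a hypothetical example, not kernel code):

	/* "+m" tells GCC the asm both reads and writes *p, replacing the
	 * old "=m" (*p) output plus matching "m" (*p) input pair. */
	static inline void inc_mem(unsigned long *p)
	{
		__asm__ __volatile__("incq %0" : "+m" (*p));
	}
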
@@ -213,24 +205,22 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
213 " " __SC "%0, %1 \n" 205 " " __SC "%0, %1 \n"
214 " beqzl %0, 1b \n" 206 " beqzl %0, 1b \n"
215 " .set mips0 \n" 207 " .set mips0 \n"
216 : "=&r" (temp), "=m" (*m) 208 : "=&r" (temp), "+m" (*m)
217 : "ir" (1UL << bit), "m" (*m)); 209 : "ir" (1UL << bit));
218 } else if (kernel_uses_llsc) { 210 } else if (kernel_uses_llsc) {
219 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 211 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
220 unsigned long temp; 212 unsigned long temp;
221 213
222 __asm__ __volatile__( 214 do {
223 " .set mips3 \n" 215 __asm__ __volatile__(
224 "1: " __LL "%0, %1 # change_bit \n" 216 " .set mips3 \n"
225 " xor %0, %2 \n" 217 " " __LL "%0, %1 # change_bit \n"
226 " " __SC "%0, %1 \n" 218 " xor %0, %2 \n"
227 " beqz %0, 2f \n" 219 " " __SC "%0, %1 \n"
228 " .subsection 2 \n" 220 " .set mips0 \n"
229 "2: b 1b \n" 221 : "=&r" (temp), "+m" (*m)
230 " .previous \n" 222 : "ir" (1UL << bit));
231 " .set mips0 \n" 223 } while (unlikely(!temp));
232 : "=&r" (temp), "=m" (*m)
233 : "ir" (1UL << bit), "m" (*m));
234 } else { 224 } else {
235 volatile unsigned long *a = addr; 225 volatile unsigned long *a = addr;
236 unsigned long mask; 226 unsigned long mask;
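
The unlikely() wrapped around the loop condition is the standard kernel branch hint from <linux/compiler.h>; with it, GCC lays the retry branch out of the hot path, which is what the deleted ".subsection 2" trampolines achieved by hand. A self-contained demo of the new loop shape (the succeed-on-third-try logic is invented for illustration):

	#include <stdio.h>

	/* As defined in the kernel's <linux/compiler.h>: */
	#define unlikely(x)	__builtin_expect(!!(x), 0)

	int main(void)
	{
		int temp = 0, tries = 0;

		do {
			temp = (++tries == 3);	/* pretend SC succeeds on try 3 */
		} while (unlikely(!temp));

		printf("stored after %d tries\n", tries);
		return 0;
	}
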
@@ -272,30 +262,26 @@ static inline int test_and_set_bit(unsigned long nr,
272 " beqzl %2, 1b \n" 262 " beqzl %2, 1b \n"
273 " and %2, %0, %3 \n" 263 " and %2, %0, %3 \n"
274 " .set mips0 \n" 264 " .set mips0 \n"
275 : "=&r" (temp), "=m" (*m), "=&r" (res) 265 : "=&r" (temp), "+m" (*m), "=&r" (res)
276 : "r" (1UL << bit), "m" (*m) 266 : "r" (1UL << bit)
277 : "memory"); 267 : "memory");
278 } else if (kernel_uses_llsc) { 268 } else if (kernel_uses_llsc) {
279 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 269 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
280 unsigned long temp; 270 unsigned long temp;
281 271
282 __asm__ __volatile__( 272 do {
283 " .set push \n" 273 __asm__ __volatile__(
284 " .set noreorder \n" 274 " .set mips3 \n"
285 " .set mips3 \n" 275 " " __LL "%0, %1 # test_and_set_bit \n"
286 "1: " __LL "%0, %1 # test_and_set_bit \n" 276 " or %2, %0, %3 \n"
287 " or %2, %0, %3 \n" 277 " " __SC "%2, %1 \n"
288 " " __SC "%2, %1 \n" 278 " .set mips0 \n"
289 " beqz %2, 2f \n" 279 : "=&r" (temp), "+m" (*m), "=&r" (res)
290 " and %2, %0, %3 \n" 280 : "r" (1UL << bit)
291 " .subsection 2 \n" 281 : "memory");
292 "2: b 1b \n" 282 } while (unlikely(!res));
293 " nop \n" 283
294 " .previous \n" 284 res = temp & (1UL << bit);
295 " .set pop \n"
296 : "=&r" (temp), "=m" (*m), "=&r" (res)
297 : "r" (1UL << bit), "m" (*m)
298 : "memory");
299 } else { 285 } else {
300 volatile unsigned long *a = addr; 286 volatile unsigned long *a = addr;
301 unsigned long mask; 287 unsigned long mask;
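
For the test-and-modify functions the old asm recomputed the return value in the branch delay slot ("and %2, %0, %3" after the beqz); with the loop now in C, the result is derived once after it from the word LL loaded: res = temp & (1UL << bit). A portable analogue under the same caveat as above (test_and_set_bit_generic is an illustrative name; fetch-or returns the old word):

	#include <limits.h>

	#define BITS_PER_LONG	(CHAR_BIT * sizeof(unsigned long))

	static int test_and_set_bit_generic(unsigned long nr, unsigned long *addr)
	{
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);
		/* The atomic RMW returns the old word... */
		unsigned long old = __atomic_fetch_or(addr + nr / BITS_PER_LONG,
						      mask, __ATOMIC_SEQ_CST);

		/* ...and the caller-visible bit is extracted afterwards,
		 * just like "res = temp & (1UL << bit);" in the patch. */
		return (old & mask) != 0;
	}
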
@@ -340,30 +326,26 @@ static inline int test_and_set_bit_lock(unsigned long nr,
340 " beqzl %2, 1b \n" 326 " beqzl %2, 1b \n"
341 " and %2, %0, %3 \n" 327 " and %2, %0, %3 \n"
342 " .set mips0 \n" 328 " .set mips0 \n"
343 : "=&r" (temp), "=m" (*m), "=&r" (res) 329 : "=&r" (temp), "+m" (*m), "=&r" (res)
344 : "r" (1UL << bit), "m" (*m) 330 : "r" (1UL << bit)
345 : "memory"); 331 : "memory");
346 } else if (kernel_uses_llsc) { 332 } else if (kernel_uses_llsc) {
347 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 333 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
348 unsigned long temp; 334 unsigned long temp;
349 335
350 __asm__ __volatile__( 336 do {
351 " .set push \n" 337 __asm__ __volatile__(
352 " .set noreorder \n" 338 " .set mips3 \n"
353 " .set mips3 \n" 339 " " __LL "%0, %1 # test_and_set_bit \n"
354 "1: " __LL "%0, %1 # test_and_set_bit \n" 340 " or %2, %0, %3 \n"
355 " or %2, %0, %3 \n" 341 " " __SC "%2, %1 \n"
356 " " __SC "%2, %1 \n" 342 " .set mips0 \n"
357 " beqz %2, 2f \n" 343 : "=&r" (temp), "+m" (*m), "=&r" (res)
358 " and %2, %0, %3 \n" 344 : "r" (1UL << bit)
359 " .subsection 2 \n" 345 : "memory");
360 "2: b 1b \n" 346 } while (unlikely(!res));
361 " nop \n" 347
362 " .previous \n" 348 res = temp & (1UL << bit);
363 " .set pop \n"
364 : "=&r" (temp), "=m" (*m), "=&r" (res)
365 : "r" (1UL << bit), "m" (*m)
366 : "memory");
367 } else { 349 } else {
368 volatile unsigned long *a = addr; 350 volatile unsigned long *a = addr;
369 unsigned long mask; 351 unsigned long mask;
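
test_and_set_bit_lock() receives the same rewrite (the asm comment still reads "test_and_set_bit"; only the loop structure changed). It differs from plain test_and_set_bit() in that it only needs acquire ordering on success, which is what bit spinlocks rely on. A hedged sketch of that pairing with portable atomics (bit_trylock/bit_unlock are invented names, not the kernel API):

	#include <limits.h>

	#define BITS_PER_LONG	(CHAR_BIT * sizeof(unsigned long))

	static unsigned long flags;

	/* Acquire on a successful set, release on the clear: the ordering
	 * pair a bit spinlock needs. */
	static int bit_trylock(unsigned long nr)
	{
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);

		return !(__atomic_fetch_or(&flags, mask, __ATOMIC_ACQUIRE) & mask);
	}

	static void bit_unlock(unsigned long nr)
	{
		__atomic_fetch_and(&flags, ~(1UL << (nr % BITS_PER_LONG)),
				   __ATOMIC_RELEASE);
	}
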
@@ -410,49 +392,43 @@ static inline int test_and_clear_bit(unsigned long nr,
410 " beqzl %2, 1b \n" 392 " beqzl %2, 1b \n"
411 " and %2, %0, %3 \n" 393 " and %2, %0, %3 \n"
412 " .set mips0 \n" 394 " .set mips0 \n"
413 : "=&r" (temp), "=m" (*m), "=&r" (res) 395 : "=&r" (temp), "+m" (*m), "=&r" (res)
414 : "r" (1UL << bit), "m" (*m) 396 : "r" (1UL << bit)
415 : "memory"); 397 : "memory");
416#ifdef CONFIG_CPU_MIPSR2 398#ifdef CONFIG_CPU_MIPSR2
417 } else if (kernel_uses_llsc && __builtin_constant_p(nr)) { 399 } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
418 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 400 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
419 unsigned long temp; 401 unsigned long temp;
420 402
421 __asm__ __volatile__( 403 do {
422 "1: " __LL "%0, %1 # test_and_clear_bit \n" 404 __asm__ __volatile__(
423 " " __EXT "%2, %0, %3, 1 \n" 405 " " __LL "%0, %1 # test_and_clear_bit \n"
424 " " __INS "%0, $0, %3, 1 \n" 406 " " __EXT "%2, %0, %3, 1 \n"
425 " " __SC "%0, %1 \n" 407 " " __INS "%0, $0, %3, 1 \n"
426 " beqz %0, 2f \n" 408 " " __SC "%0, %1 \n"
427 " .subsection 2 \n" 409 : "=&r" (temp), "+m" (*m), "=&r" (res)
428 "2: b 1b \n" 410 : "ir" (bit)
429 " .previous \n" 411 : "memory");
430 : "=&r" (temp), "=m" (*m), "=&r" (res) 412 } while (unlikely(!temp));
431 : "ir" (bit), "m" (*m)
432 : "memory");
433#endif 413#endif
434 } else if (kernel_uses_llsc) { 414 } else if (kernel_uses_llsc) {
435 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 415 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
436 unsigned long temp; 416 unsigned long temp;
437 417
438 __asm__ __volatile__( 418 do {
439 " .set push \n" 419 __asm__ __volatile__(
440 " .set noreorder \n" 420 " .set mips3 \n"
441 " .set mips3 \n" 421 " " __LL "%0, %1 # test_and_clear_bit \n"
442 "1: " __LL "%0, %1 # test_and_clear_bit \n" 422 " or %2, %0, %3 \n"
443 " or %2, %0, %3 \n" 423 " xor %2, %3 \n"
444 " xor %2, %3 \n" 424 " " __SC "%2, %1 \n"
445 " " __SC "%2, %1 \n" 425 " .set mips0 \n"
446 " beqz %2, 2f \n" 426 : "=&r" (temp), "+m" (*m), "=&r" (res)
447 " and %2, %0, %3 \n" 427 : "r" (1UL << bit)
448 " .subsection 2 \n" 428 : "memory");
449 "2: b 1b \n" 429 } while (unlikely(!res));
450 " nop \n" 430
451 " .previous \n" 431 res = temp & (1UL << bit);
452 " .set pop \n"
453 : "=&r" (temp), "=m" (*m), "=&r" (res)
454 : "r" (1UL << bit), "m" (*m)
455 : "memory");
456 } else { 432 } else {
457 volatile unsigned long *a = addr; 433 volatile unsigned long *a = addr;
458 unsigned long mask; 434 unsigned long mask;
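
One detail in the fallback path above: the bit is cleared with "or" followed by "xor" rather than an and-with-complement, because only the positive mask 1UL << bit is available in %3, and (x | mask) ^ mask == x & ~mask. The same identity in plain C:

	/* Clearing a bit with only the positive mask in hand. */
	static unsigned long clear_with_positive_mask(unsigned long x,
						      unsigned long mask)
	{
		return (x | mask) ^ mask;	/* equals x & ~mask */
	}
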
@@ -499,30 +475,26 @@ static inline int test_and_change_bit(unsigned long nr,
499 " beqzl %2, 1b \n" 475 " beqzl %2, 1b \n"
500 " and %2, %0, %3 \n" 476 " and %2, %0, %3 \n"
501 " .set mips0 \n" 477 " .set mips0 \n"
502 : "=&r" (temp), "=m" (*m), "=&r" (res) 478 : "=&r" (temp), "+m" (*m), "=&r" (res)
503 : "r" (1UL << bit), "m" (*m) 479 : "r" (1UL << bit)
504 : "memory"); 480 : "memory");
505 } else if (kernel_uses_llsc) { 481 } else if (kernel_uses_llsc) {
506 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 482 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
507 unsigned long temp; 483 unsigned long temp;
508 484
509 __asm__ __volatile__( 485 do {
510 " .set push \n" 486 __asm__ __volatile__(
511 " .set noreorder \n" 487 " .set mips3 \n"
512 " .set mips3 \n" 488 " " __LL "%0, %1 # test_and_change_bit \n"
513 "1: " __LL "%0, %1 # test_and_change_bit \n" 489 " xor %2, %0, %3 \n"
514 " xor %2, %0, %3 \n" 490 " " __SC "\t%2, %1 \n"
515 " " __SC "\t%2, %1 \n" 491 " .set mips0 \n"
516 " beqz %2, 2f \n" 492 : "=&r" (temp), "+m" (*m), "=&r" (res)
517 " and %2, %0, %3 \n" 493 : "r" (1UL << bit)
518 " .subsection 2 \n" 494 : "memory");
519 "2: b 1b \n" 495 } while (unlikely(!res));
520 " nop \n" 496
521 " .previous \n" 497 res = temp & (1UL << bit);
522 " .set pop \n"
523 : "=&r" (temp), "=m" (*m), "=&r" (res)
524 : "r" (1UL << bit), "m" (*m)
525 : "memory");
526 } else { 498 } else {
527 volatile unsigned long *a = addr; 499 volatile unsigned long *a = addr;
528 unsigned long mask; 500 unsigned long mask;
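
test_and_change_bit() completes the set, with xor as the update step. The corresponding portable stand-in (test_and_change_bit_generic is an illustrative name, not the kernel API):

	#include <limits.h>

	#define BITS_PER_LONG	(CHAR_BIT * sizeof(unsigned long))

	static int test_and_change_bit_generic(unsigned long nr, unsigned long *addr)
	{
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);
		/* xor toggles the bit; the returned old word tells us what it was. */
		unsigned long old = __atomic_fetch_xor(addr + nr / BITS_PER_LONG,
						       mask, __ATOMIC_SEQ_CST);

		return (old & mask) != 0;
	}
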