Diffstat (limited to 'include/asm-mips/bitops.h')
-rw-r--r--  include/asm-mips/bitops.h | 74
1 file changed, 50 insertions(+), 24 deletions(-)
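What the patch does, in short: every ll/sc loop in this header now opens with `__SET_MIPS` (`.set mips2` on 32-bit, `.set mips3` on 64-bit) and closes with `.set mips0` or `.set pop`, so the assembler accepts ll/sc (or lld/scd) even when the rest of the kernel is built for an ISA level that lacks them. As a hedged sketch, here is roughly what the patched 32-bit (_MIPS_SZLONG == 32) fast path of set_bit() expands to after preprocessing; the standalone name demo_set_bit and the word-index line (reconstructed from SZLONG_LOG = 5) are illustrative assumptions, not code from the patch:

```c
/* Sketch: the non-R10000 set_bit() loop with __SET_MIPS, __LL and
 * __SC substituted by hand for the 32-bit case. */
static inline void demo_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
	"	.set	mips2		\n" /* __SET_MIPS: ll/sc legal from here */
	"1:	ll	%0, %1		\n" /* load-linked the word */
	"	or	%0, %2		\n" /* set the requested bit */
	"	sc	%0, %1		\n" /* store-conditional */
	"	beqz	%0, 1b		\n" /* retry if sc lost the race */
	"	.set	mips0		\n" /* back to the build's ISA level */
	: "=&r" (temp), "=m" (*m)
	: "ir" (1UL << (nr & 31UL)), "m" (*m));
}
```

The closing `.set mips0` restores the ISA level the file is otherwise assembled with, so only the bracketed instructions are affected.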
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index eb8d79dba11c..1dc35879b362 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -18,14 +18,16 @@
 #if (_MIPS_SZLONG == 32)
 #define SZLONG_LOG 5
 #define SZLONG_MASK 31UL
 #define __LL		"ll	"
 #define __SC		"sc	"
+#define __SET_MIPS	".set	mips2	"
 #define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x))
 #elif (_MIPS_SZLONG == 64)
 #define SZLONG_LOG 6
 #define SZLONG_MASK 63UL
 #define __LL		"lld	"
 #define __SC		"scd	"
+#define __SET_MIPS	".set	mips3	"
 #define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x))
 #endif
 
@@ -72,18 +74,22 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
+		"	" __SET_MIPS "				\n"
 		"1:	" __LL "%0, %1		# set_bit	\n"
 		"	or	%0, %2			\n"
-		"	"__SC	"%0, %1			\n"
+		"	" __SC	"%0, %1			\n"
 		"	beqzl	%0, 1b			\n"
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
+		"	" __SET_MIPS "				\n"
 		"1:	" __LL "%0, %1		# set_bit	\n"
 		"	or	%0, %2			\n"
-		"	"__SC	"%0, %1			\n"
+		"	" __SC	"%0, %1			\n"
 		"	beqz	%0, 1b			\n"
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
 	} else {
@@ -132,18 +138,22 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
+		"	" __SET_MIPS "				\n"
 		"1:	" __LL "%0, %1		# clear_bit	\n"
 		"	and	%0, %2			\n"
 		"	" __SC "%0, %1			\n"
 		"	beqzl	%0, 1b			\n"
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
+		"	" __SET_MIPS "				\n"
 		"1:	" __LL "%0, %1		# clear_bit	\n"
 		"	and	%0, %2			\n"
 		"	" __SC "%0, %1			\n"
 		"	beqz	%0, 1b			\n"
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
 	} else {
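A side note on the operands these hunks leave untouched: the word index comes from `nr >> SZLONG_LOG` and the bit position from `nr & SZLONG_MASK`; set_bit ors in `1UL << ...` while clear_bit ands with its complement. A small host-side sketch with the 32-bit constants (the chosen nr and the printf harness are illustrative only):

```c
#include <stdio.h>

/* Index/mask arithmetic behind the asm operands, using the 32-bit
 * values SZLONG_LOG = 5 and SZLONG_MASK = 31UL. */
int main(void)
{
	unsigned long nr = 37;                       /* arbitrary example bit */
	unsigned long word = nr >> 5;                /* nr >> SZLONG_LOG == 1 */
	unsigned long set_mask = 1UL << (nr & 31UL); /* %2 of set_bit: 0x20 */
	unsigned long clear_mask = ~set_mask;        /* %2 of clear_bit */

	printf("word %lu, set 0x%lx, clear 0x%lx\n", word, set_mask, clear_mask);
	return 0;
}
```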
@@ -191,10 +201,12 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	" __SET_MIPS "				\n"
 		"1:	" __LL "%0, %1		# change_bit	\n"
 		"	xor	%0, %2			\n"
-		"	"__SC	"%0, %1			\n"
+		"	" __SC	"%0, %1			\n"
 		"	beqzl	%0, 1b			\n"
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
 	} else if (cpu_has_llsc) {
@@ -202,10 +214,12 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	" __SET_MIPS "				\n"
 		"1:	" __LL "%0, %1		# change_bit	\n"
 		"	xor	%0, %2			\n"
-		"	"__SC	"%0, %1			\n"
+		"	" __SC	"%0, %1			\n"
 		"	beqz	%0, 1b			\n"
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
 	} else {
@@ -253,14 +267,16 @@ static inline int test_and_set_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
+		"	" __SET_MIPS "					\n"
 		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
 		"	or	%2, %0, %3		\n"
 		"	" __SC "%2, %1			\n"
 		"	beqzl	%2, 1b			\n"
 		"	and	%2, %0, %3		\n"
 #ifdef CONFIG_SMP
-		"sync					\n"
+		"	sync				\n"
 #endif
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
 		: "memory");
@@ -271,16 +287,18 @@ static inline int test_and_set_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
-		"	.set	noreorder	# test_and_set_bit	\n"
-		"1:	" __LL "%0, %1			\n"
+		"	.set	push			\n"
+		"	.set	noreorder		\n"
+		"	" __SET_MIPS "				\n"
+		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
 		"	or	%2, %0, %3		\n"
 		"	" __SC "%2, %1			\n"
 		"	beqz	%2, 1b			\n"
 		"	and	%2, %0, %3		\n"
 #ifdef CONFIG_SMP
-		"sync					\n"
+		"	sync				\n"
 #endif
-		".set\treorder"
+		"	.set	pop			\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
 		: "memory");
@@ -343,15 +361,17 @@ static inline int test_and_clear_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
+		"	" __SET_MIPS "					\n"
 		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
 		"	or	%2, %0, %3		\n"
 		"	xor	%2, %3			\n"
-			__SC 	"%2, %1			\n"
+		"	" __SC 	"%2, %1			\n"
 		"	beqzl	%2, 1b			\n"
 		"	and	%2, %0, %3		\n"
 #ifdef CONFIG_SMP
 		"	sync				\n"
 #endif
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
 		: "memory");
@@ -362,17 +382,19 @@ static inline int test_and_clear_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
-		"	.set	noreorder	# test_and_clear_bit	\n"
-		"1:	" __LL "%0, %1			\n"
+		"	.set	push			\n"
+		"	.set	noreorder		\n"
+		"	" __SET_MIPS "				\n"
+		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
 		"	or	%2, %0, %3		\n"
 		"	xor	%2, %3			\n"
-			__SC 	"%2, %1			\n"
+		"	" __SC 	"%2, %1			\n"
 		"	beqz	%2, 1b			\n"
 		"	and	%2, %0, %3		\n"
 #ifdef CONFIG_SMP
 		"	sync				\n"
 #endif
-		"	.set	reorder			\n"
+		"	.set	pop			\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
 		: "memory");
@@ -435,14 +457,16 @@ static inline int test_and_change_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
-		"1:	" __LL " %0, %1		# test_and_change_bit	\n"
+		"	" __SET_MIPS "					\n"
+		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
 		"	xor	%2, %0, %3		\n"
-		"	"__SC	"%2, %1			\n"
+		"	" __SC	"%2, %1			\n"
 		"	beqzl	%2, 1b			\n"
 		"	and	%2, %0, %3		\n"
 #ifdef CONFIG_SMP
 		"	sync				\n"
 #endif
+		"	.set	mips0			\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
 		: "memory");
@@ -453,16 +477,18 @@ static inline int test_and_change_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
-		"	.set	noreorder	# test_and_change_bit	\n"
-		"1:	" __LL " %0, %1			\n"
+		"	.set	push			\n"
+		"	.set	noreorder		\n"
+		"	" __SET_MIPS "				\n"
+		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
 		"	xor	%2, %0, %3		\n"
-		"	"__SC	"\t%2, %1		\n"
+		"	" __SC	"\t%2, %1		\n"
 		"	beqz	%2, 1b			\n"
 		"	and	%2, %0, %3		\n"
 #ifdef CONFIG_SMP
 		"	sync				\n"
 #endif
-		"	.set	reorder			\n"
+		"	.set	pop			\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
 		: "memory");