Diffstat (limited to 'include/asm-mips/bitops.h')
-rw-r--r--	include/asm-mips/bitops.h	56
1 file changed, 33 insertions(+), 23 deletions(-)
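
The diff below consistently hoists the repeated "nr & SZLONG_MASK" expression in set_bit(), clear_bit(), change_bit() and the test_and_* variants into a local "bit" variable. As a reminder of the split these helpers rely on, here is a minimal standalone C sketch of the word-index / bit-within-word decomposition; it is illustrative only and assumes the 64-bit values SZLONG_LOG == 6 and SZLONG_MASK == 63, which are not stated in this patch:

	/*
	 * Illustrative sketch, not from the patch: assumes a 64-bit
	 * kernel where SZLONG_LOG is 6 and SZLONG_MASK is 63.  It shows
	 * the word-offset / bit-within-word split that the new `bit'
	 * local caches in the functions changed below.
	 */
	#include <stdio.h>

	#define SZLONG_LOG	6
	#define SZLONG_MASK	63UL

	int main(void)
	{
		unsigned long nr = 200;			/* arbitrary bit number */
		unsigned long word = nr >> SZLONG_LOG;	/* which long in the bitmap */
		unsigned long bit  = nr & SZLONG_MASK;	/* which bit inside that long */

		printf("nr=%lu -> word %lu, bit %lu, mask %#lx\n",
		       nr, word, bit, 1UL << bit);
		return 0;
	}
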
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 89436b96ad66..8959da245cfb 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -54,6 +54,7 @@
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
+	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
@@ -65,9 +66,9 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 		"	beqzl	%0, 1b					\n"
 		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
+		: "ir" (1UL << bit), "m" (*m));
 #ifdef CONFIG_CPU_MIPSR2
-	} else if (__builtin_constant_p(nr)) {
+	} else if (__builtin_constant_p(bit)) {
 		__asm__ __volatile__(
 		"1:	" __LL "%0, %1			# set_bit	\n"
 		"	" __INS "%0, %4, %2, 1				\n"
@@ -77,7 +78,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 		"2:	b	1b					\n"
 		"	.previous					\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (nr & SZLONG_MASK), "m" (*m), "r" (~0));
+		: "ir" (bit), "m" (*m), "r" (~0));
 #endif /* CONFIG_CPU_MIPSR2 */
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
@@ -91,14 +92,14 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 		"	.previous					\n"
 		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
+		: "ir" (1UL << bit), "m" (*m));
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
-		mask = 1UL << (nr & SZLONG_MASK);
+		mask = 1UL << bit;
 		local_irq_save(flags);
 		*a |= mask;
 		local_irq_restore(flags);
@@ -118,6 +119,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
+	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
@@ -129,9 +131,9 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 		"	beqzl	%0, 1b					\n"
 		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
+		: "ir" (~(1UL << bit)), "m" (*m));
 #ifdef CONFIG_CPU_MIPSR2
-	} else if (__builtin_constant_p(nr)) {
+	} else if (__builtin_constant_p(bit)) {
 		__asm__ __volatile__(
 		"1:	" __LL "%0, %1			# clear_bit	\n"
 		"	" __INS "%0, $0, %2, 1				\n"
@@ -141,7 +143,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 		"2:	b	1b					\n"
 		"	.previous					\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (nr & SZLONG_MASK), "m" (*m));
+		: "ir" (bit), "m" (*m));
 #endif /* CONFIG_CPU_MIPSR2 */
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
@@ -155,14 +157,14 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 		"	.previous					\n"
 		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
+		: "ir" (~(1UL << bit)), "m" (*m));
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
-		mask = 1UL << (nr & SZLONG_MASK);
+		mask = 1UL << bit;
 		local_irq_save(flags);
 		*a &= ~mask;
 		local_irq_restore(flags);
@@ -180,6 +182,8 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
+	unsigned short bit = nr & SZLONG_MASK;
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
@@ -192,7 +196,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		"	beqzl	%0, 1b				\n"
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
+		: "ir" (1UL << bit), "m" (*m));
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
@@ -208,14 +212,14 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		"	.previous				\n"
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (*m)
-		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
+		: "ir" (1UL << bit), "m" (*m));
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
-		mask = 1UL << (nr & SZLONG_MASK);
+		mask = 1UL << bit;
 		local_irq_save(flags);
 		*a ^= mask;
 		local_irq_restore(flags);
@@ -233,6 +237,8 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
+	unsigned short bit = nr & SZLONG_MASK;
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp, res;
@@ -246,7 +252,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		"	and	%2, %0, %3			\n"
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
-		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
+		: "r" (1UL << bit), "m" (*m)
 		: "memory");
 
 		return res != 0;
@@ -269,7 +275,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		"	.previous				\n"
 		"	.set	pop				\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
-		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
+		: "r" (1UL << bit), "m" (*m)
 		: "memory");
 
 		return res != 0;
@@ -280,7 +286,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
-		mask = 1UL << (nr & SZLONG_MASK);
+		mask = 1UL << bit;
 		local_irq_save(flags);
 		retval = (mask & *a) != 0;
 		*a |= mask;
@@ -303,6 +309,8 @@ static inline int test_and_set_bit(unsigned long nr,
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
+	unsigned short bit = nr & SZLONG_MASK;
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp, res;
@@ -317,7 +325,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		"	and	%2, %0, %3			\n"
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
-		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
+		: "r" (1UL << bit), "m" (*m)
 		: "memory");
 
 		return res != 0;
@@ -336,7 +344,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		"2:	b	1b				\n"
 		"	.previous				\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
-		: "ri" (nr & SZLONG_MASK), "m" (*m)
+		: "ri" (bit), "m" (*m)
 		: "memory");
 
 		return res;
@@ -361,7 +369,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		"	.previous				\n"
 		"	.set	pop				\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
-		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
+		: "r" (1UL << bit), "m" (*m)
 		: "memory");
 
 		return res != 0;
@@ -372,7 +380,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
-		mask = 1UL << (nr & SZLONG_MASK);
+		mask = 1UL << bit;
 		local_irq_save(flags);
 		retval = (mask & *a) != 0;
 		*a &= ~mask;
@@ -395,6 +403,8 @@ static inline int test_and_clear_bit(unsigned long nr,
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
+	unsigned short bit = nr & SZLONG_MASK;
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp, res;
@@ -408,7 +418,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		"	and	%2, %0, %3			\n"
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
-		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
+		: "r" (1UL << bit), "m" (*m)
 		: "memory");
 
 		return res != 0;
@@ -431,7 +441,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		"	.previous				\n"
 		"	.set	pop				\n"
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
-		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
+		: "r" (1UL << bit), "m" (*m)
 		: "memory");
 
 		return res != 0;
@@ -441,7 +451,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
-		mask = 1UL << (nr & SZLONG_MASK);
+		mask = 1UL << bit;
 		local_irq_save(flags);
 		retval = (mask & *a) != 0;
 		*a ^= mask;
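
For context, a hypothetical caller of the interfaces touched above. The prototypes match the diff (set_bit(nr, addr) and test_and_clear_bit(nr, addr) operating on an array of unsigned long), but the tiny non-atomic stand-ins below exist only to keep the example compilable outside the kernel; they are not the MIPS implementations from this file:

	/*
	 * Hypothetical usage sketch, not part of the patch.  The stand-in
	 * helpers are plain, non-atomic C so the example builds in
	 * userspace; real kernel code gets the atomic versions from
	 * <asm/bitops.h>.
	 */
	#include <stdio.h>

	#define BITS_PER_LONG	(8 * sizeof(unsigned long))

	static void set_bit(unsigned long nr, volatile unsigned long *addr)
	{
		addr[nr / BITS_PER_LONG] |= 1UL << (nr % BITS_PER_LONG);
	}

	static int test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
	{
		volatile unsigned long *p = addr + nr / BITS_PER_LONG;
		unsigned long mask = 1UL << (nr % BITS_PER_LONG);
		int old = (*p & mask) != 0;

		*p &= ~mask;
		return old;
	}

	int main(void)
	{
		unsigned long pending[4] = { 0 };	/* big enough for bit 70 on 32- or 64-bit longs */

		set_bit(5, pending);
		set_bit(70, pending);	/* which word this lands in depends on sizeof(long) */

		if (test_and_clear_bit(70, pending))
			printf("bit 70 was set and is now clear\n");
		printf("word 0 of the bitmap: %#lx\n", pending[0]);
		return 0;
	}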