diff options
Diffstat (limited to 'arch/arc/include/asm/bitops.h')
-rw-r--r-- | arch/arc/include/asm/bitops.h | 516 |
1 files changed, 516 insertions, 0 deletions
diff --git a/arch/arc/include/asm/bitops.h b/arch/arc/include/asm/bitops.h new file mode 100644 index 000000000000..647a83a8e756 --- /dev/null +++ b/arch/arc/include/asm/bitops.h | |||
@@ -0,0 +1,516 @@ | |||
1 | /* | ||
2 | * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com) | ||
3 | * | ||
4 | * This program is free software; you can redistribute it and/or modify | ||
5 | * it under the terms of the GNU General Public License version 2 as | ||
6 | * published by the Free Software Foundation. | ||
7 | */ | ||
8 | |||
9 | #ifndef _ASM_BITOPS_H | ||
10 | #define _ASM_BITOPS_H | ||
11 | |||
12 | #ifndef _LINUX_BITOPS_H | ||
13 | #error only <linux/bitops.h> can be included directly | ||
14 | #endif | ||
15 | |||
16 | #ifdef __KERNEL__ | ||
17 | |||
18 | #ifndef __ASSEMBLY__ | ||
19 | |||
20 | #include <linux/types.h> | ||
21 | #include <linux/compiler.h> | ||
22 | |||
23 | /* | ||
24 | * Hardware assisted read-modify-write using ARC700 LLOCK/SCOND insns. | ||
25 | * The Kconfig glue ensures that in SMP, this is only set if the container | ||
26 | * SoC/platform has cross-core coherent LLOCK/SCOND | ||
27 | */ | ||
28 | #if defined(CONFIG_ARC_HAS_LLSC) | ||
29 | |||
/*
 * Atomically set bit @nr in the bitmap based at @m.
 *
 * LLOCK/SCOND retry loop: load-locked the 32-bit word, BSET the bit,
 * then store-conditional; "bnz 1b" retries until the store-conditional
 * succeeds (i.e. no other agent wrote the word in between).
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/*
	 * Variable @nr is not masked: the h/w bit-fiddling insns only
	 * consider the low 5 bits of the position operand.  Constant @nr
	 * still needs compile-time masking (see the "GCC Quirk" comment
	 * later in this file).
	 */
	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%1]	\n"
	"	bset    %0, %0, %2	\n"
	"	scond   %0, [%1]	\n"
	"	bnz     1b	\n"
	: "=&r"(temp)	/* earlyclobber: written before inputs are dead */
	: "r"(m), "ir"(nr)
	: "cc");
}
48 | |||
/*
 * Atomically clear bit @nr in the bitmap based at @m.
 * Same LLOCK/SCOND retry scheme as set_bit(), using BCLR.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* constant @nr masked at compile time; h/w masks variable @nr */
	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%1]	\n"
	"	bclr    %0, %0, %2	\n"
	"	scond   %0, [%1]	\n"
	"	bnz     1b	\n"
	: "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");
}
67 | |||
/*
 * Atomically toggle bit @nr in the bitmap based at @m.
 * Same LLOCK/SCOND retry scheme as set_bit(), using BXOR.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* constant @nr masked at compile time; h/w masks variable @nr */
	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%1]	\n"
	"	bxor    %0, %0, %2	\n"
	"	scond   %0, [%1]	\n"
	"	bnz     1b	\n"
	: "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");
}
86 | |||
/*
 * Semantically:
 *    Test the bit
 *    if clear
 *        set it and return 0 (old value)
 *    else
 *        return 1 (old value).
 *
 * Since ARC lacks an equivalent h/w primitive, the bit is set
 * unconditionally and the old value of the bit is returned.
 */
static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	/* NOTE(review): siblings use unsigned int here -- harmless but
	 * inconsistent */
	unsigned long old, temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* constant @nr masked at compile time; h/w masks variable @nr */
	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bset    %1, %0, %3	\n"	/* %0 keeps the pre-update word */
	"	scond   %1, [%2]	\n"
	"	bnz     1b	\n"
	: "=&r"(old), "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");

	/*
	 * NOTE(review): for a non-constant nr >= 32 this C shift is UB
	 * even though the h/w insn above masks the position to 5 bits --
	 * consider masking @nr unconditionally.
	 */
	return (old & (1 << nr)) != 0;
}
118 | |||
/*
 * Atomically clear bit @nr in the bitmap based at @m and return its
 * old value (1 if it was set, 0 otherwise).  See test_and_set_bit()
 * for the rationale of the unconditional update.
 */
static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int old, temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* constant @nr masked at compile time; h/w masks variable @nr */
	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bclr    %1, %0, %3	\n"	/* %0 keeps the pre-update word */
	"	scond   %1, [%2]	\n"
	"	bnz     1b	\n"
	: "=&r"(old), "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");

	/* NOTE(review): UB if a non-constant nr >= 32 (see test_and_set_bit) */
	return (old & (1 << nr)) != 0;
}
140 | |||
/*
 * Atomically toggle bit @nr in the bitmap based at @m and return its
 * old value (1 if it was set, 0 otherwise).
 */
static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int old, temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* constant @nr masked at compile time; h/w masks variable @nr */
	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bxor    %1, %0, %3	\n"	/* %0 keeps the pre-update word */
	"	scond   %1, [%2]	\n"
	"	bnz     1b	\n"
	: "=&r"(old), "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");

	/* NOTE(review): UB if a non-constant nr >= 32 (see test_and_set_bit) */
	return (old & (1 << nr)) != 0;
}
162 | |||
163 | #else /* !CONFIG_ARC_HAS_LLSC */ | ||
164 | |||
165 | #include <asm/smp.h> | ||
166 | |||
167 | /* | ||
168 | * Non hardware assisted Atomic-R-M-W | ||
169 | * Locking would change to irq-disabling only (UP) and spinlocks (SMP) | ||
170 | * | ||
171 | * There's "significant" micro-optimization in writing our own variants of | ||
172 | * bitops (over generic variants) | ||
173 | * | ||
174 | * (1) The generic APIs have "signed" @nr while we have it "unsigned" | ||
175 | * This avoids extra code to be generated for pointer arithmatic, since | ||
176 | * is "not sure" that index is NOT -ve | ||
177 | * (2) Utilize the fact that ARCompact bit fidding insn (BSET/BCLR/ASL) etc | ||
178 | * only consider bottom 5 bits of @nr, so NO need to mask them off. | ||
179 | * (GCC Quirk: however for constant @nr we still need to do the masking | ||
180 | * at compile time) | ||
181 | */ | ||
182 | |||
/*
 * Atomically set bit @nr in the bitmap based at @m.
 *
 * No LLOCK/SCOND on this config: atomicity comes from bitops_lock()
 * (irq-disable on UP, spinlock on SMP -- see asm/smp.h).
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp, flags;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/*
	 * Mask unconditionally: the previous code only masked constant
	 * @nr (counting on the ARC shifter ignoring bits 5..31 of a
	 * variable count), but a C shift by >= 32 is undefined behaviour
	 * regardless of what the hardware does.
	 */
	nr &= 0x1f;

	bitops_lock(flags);

	temp = *m;
	*m = temp | (1UL << nr);

	bitops_unlock(flags);
}
198 | |||
/*
 * Atomically clear bit @nr in the bitmap based at @m.
 * Exclusion provided by bitops_lock() (irq-off on UP, spinlock on SMP).
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp, flags;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see set_bit) */
	nr &= 0x1f;

	bitops_lock(flags);

	temp = *m;
	*m = temp & ~(1UL << nr);

	bitops_unlock(flags);
}
214 | |||
/*
 * Atomically toggle bit @nr in the bitmap based at @m.
 * Exclusion provided by bitops_lock() (irq-off on UP, spinlock on SMP).
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp, flags;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see set_bit) */
	nr &= 0x1f;

	bitops_lock(flags);

	temp = *m;
	*m = temp ^ (1UL << nr);

	bitops_unlock(flags);
}
230 | |||
/*
 * Atomically set bit @nr in the bitmap based at @m and return its old
 * value (1 if it was already set, 0 otherwise).
 * Exclusion provided by bitops_lock() (irq-off on UP, spinlock on SMP).
 */
static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, flags;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see set_bit) */
	nr &= 0x1f;

	bitops_lock(flags);

	old = *m;
	*m = old | (1UL << nr);	/* 1UL, not 1: match the plain set_bit() */

	bitops_unlock(flags);

	return (old & (1UL << nr)) != 0;
}
248 | |||
/*
 * Atomically clear bit @nr in the bitmap based at @m and return its
 * old value (1 if it was set, 0 otherwise).
 * Exclusion provided by bitops_lock() (irq-off on UP, spinlock on SMP).
 */
static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, flags;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see set_bit) */
	nr &= 0x1f;

	bitops_lock(flags);

	old = *m;
	*m = old & ~(1UL << nr);

	bitops_unlock(flags);

	return (old & (1UL << nr)) != 0;
}
267 | |||
/*
 * Atomically toggle bit @nr in the bitmap based at @m and return its
 * old value (1 if it was set, 0 otherwise).
 * Exclusion provided by bitops_lock() (irq-off on UP, spinlock on SMP).
 */
static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, flags;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see set_bit) */
	nr &= 0x1f;

	bitops_lock(flags);

	old = *m;
	*m = old ^ (1UL << nr);

	bitops_unlock(flags);

	return (old & (1UL << nr)) != 0;
}
286 | |||
287 | #endif /* CONFIG_ARC_HAS_LLSC */ | ||
288 | |||
289 | /*************************************** | ||
290 | * Non atomic variants | ||
291 | **************************************/ | ||
292 | |||
/*
 * __set_bit() - non-atomic set of bit @nr in the bitmap based at @m.
 * Caller is responsible for exclusion.
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/*
	 * Mask unconditionally: the previous code masked only constant
	 * @nr (relying on ARC insns ignoring the upper position bits),
	 * but a C shift by >= 32 is undefined behaviour.
	 */
	nr &= 0x1f;

	temp = *m;
	*m = temp | (1UL << nr);
}
304 | |||
/*
 * __clear_bit() - non-atomic clear of bit @nr in the bitmap based at @m.
 * Caller is responsible for exclusion.
 */
static inline void __clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see __set_bit) */
	nr &= 0x1f;

	temp = *m;
	*m = temp & ~(1UL << nr);
}
316 | |||
/*
 * __change_bit() - non-atomic toggle of bit @nr in the bitmap at @m.
 * Caller is responsible for exclusion.
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see __set_bit) */
	nr &= 0x1f;

	temp = *m;
	*m = temp ^ (1UL << nr);
}
328 | |||
/*
 * __test_and_set_bit() - non-atomic: set bit @nr and return its old
 * value (1 if it was already set, 0 otherwise).
 */
static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see __set_bit) */
	nr &= 0x1f;

	old = *m;
	*m = old | (1UL << nr);	/* 1UL, not 1: consistent with __set_bit */

	return (old & (1UL << nr)) != 0;
}
343 | |||
/*
 * __test_and_clear_bit() - non-atomic: clear bit @nr and return its
 * old value (1 if it was set, 0 otherwise).
 */
static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see __set_bit) */
	nr &= 0x1f;

	old = *m;
	*m = old & ~(1UL << nr);

	return (old & (1UL << nr)) != 0;
}
358 | |||
/*
 * __test_and_change_bit() - non-atomic: toggle bit @nr and return its
 * old value (1 if it was set, 0 otherwise).
 */
static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old;

	/* advance to the 32-bit word containing bit @nr */
	m += nr >> 5;

	/* mask unconditionally: C shift by >= 32 is UB (see __set_bit) */
	nr &= 0x1f;

	old = *m;
	*m = old ^ (1UL << nr);

	return (old & (1UL << nr)) != 0;
}
373 | |||
/*
 * Compile-time-constant variant of test_bit(): with constant @nr the
 * whole expression folds to a single load + AND.
 * This routine doesn't need to be atomic.
 */
static inline int
__constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	/* view the bitmap as 32-bit words, matching the nr >> 5 indexing */
	const volatile unsigned int *word = (const volatile unsigned int *)addr;
	unsigned long mask = 1UL << (nr & 31);

	return (word[nr >> 5] & mask) != 0;
}
383 | |||
/*
 * Runtime variant of test_bit(): non-atomic read of bit @nr in the
 * bitmap based at @addr.
 */
static inline int
__test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	unsigned long mask;

	/* advance to the 32-bit word containing bit @nr */
	addr += nr >> 5;

	/*
	 * Mask the position and shift 1UL: the previous "1 << nr" was an
	 * int-width shift, undefined for nr >= 32 (it leaned on ARC only
	 * considering 5 bits in bit-fiddling insns).  The masked form is
	 * well-defined C with identical results.
	 */
	mask = 1UL << (nr & 0x1f);

	return ((mask & *addr) != 0);
}
396 | |||
/*
 * test_bit() - non-atomic bit read; dispatches at compile time so a
 * constant @nr folds completely (__constant_test_bit) while a variable
 * @nr takes the plain load+mask path (__test_bit).
 */
#define test_bit(nr, addr)	(__builtin_constant_p(nr) ? \
					__constant_test_bit((nr), (addr)) : \
					__test_bit((nr), (addr)))
400 | |||
/*
 * Count the number of zeros, starting from MSB
 * Helper for fls( ) friends
 * This is a pure count, so (1-32) or (0-31) doesn't apply
 * It could be 0 to 32, based on num of 0's in there
 * clz(0x8000_0000) = 0, clz(0xFFFF_FFFF)=0, clz(0) = 32, clz(1) = 31
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
{
	unsigned int res;

	__asm__ __volatile__(
	"	norm.f  %0, %1		\n"
	/*
	 * NOTE(review): the two conditional fixups below appear to map
	 * NORM's result/flags onto the contract documented above (e.g.
	 * the x == 0 and negative-x corner cases) -- confirm against the
	 * ARCompact NORM instruction definition.
	 */
	"	mov.n   %0, 0		\n"
	"	add.p   %0, %0, 1	\n"
	: "=r"(res)
	: "r"(x)
	: "cc");

	return res;
}
422 | |||
/*
 * constant_fls() - fls() computable entirely at compile time.
 * Binary search narrowing from 16 bits down to 1; same contract as
 * fls(): 1-based position of the most significant set bit, 0 for 0.
 *
 * Works on an unsigned copy of @x: the previous code left-shifted the
 * signed argument, which is undefined behaviour once a set bit reaches
 * or crosses the sign bit.
 */
static inline int constant_fls(int x)
{
	unsigned int v = (unsigned int)x;
	int r = 32;

	if (!v)
		return 0;
	if (!(v & 0xffff0000u)) {
		v <<= 16;
		r -= 16;
	}
	if (!(v & 0xff000000u)) {
		v <<= 8;
		r -= 8;
	}
	if (!(v & 0xf0000000u)) {
		v <<= 4;
		r -= 4;
	}
	if (!(v & 0xc0000000u)) {
		v <<= 2;
		r -= 2;
	}
	if (!(v & 0x80000000u))
		r -= 1;
	return r;
}
451 | |||
/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	/* constant argument: fold the whole computation at compile time */
	if (__builtin_constant_p(x))
		return constant_fls(x);

	/* clz() yields 0..32, so this maps onto the 32..0 contract above */
	return 32 - clz(x);
}
464 | |||
/*
 * __fls: Similar to fls, but zero based (0-31).
 * Returns 0 for an input of 0, matching fls(0) == 0 shifted down.
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	return x ? fls(x) - 1 : 0;
}
475 | |||
/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 *
 * (__t & -__t) isolates the lowest set bit, so its fls() is its
 * 1-based position; an input of 0 stays 0.
 */
#define ffs(x)	({ unsigned long __t = (x); fls(__t & -__t); })
481 | |||
/*
 * __ffs: Similar to ffs, but zero based (0-31).
 * An input of 0 yields 0 (ffs(0) == 0, and the zero short-circuits).
 */
static inline __attribute__ ((const)) int __ffs(unsigned long word)
{
	return word ? ffs(word) - 1 : 0;
}
492 | |||
/*
 * ffz = Find First Zero in word.
 * @return:[0-31], 32 if all 1's
 *
 * NOTE(review): with the __ffs() above, __ffs(0) returns 0, so ffz of
 * an all-1s word yields 0, not 32 as documented -- confirm no caller
 * relies on the documented all-1s behaviour.
 */
#define ffz(x)	__ffs(~(x))

/* TODO does this affect uni-processor code */
#define smp_mb__before_clear_bit()  barrier()
#define smp_mb__after_clear_bit()   barrier()
502 | |||
503 | #include <asm-generic/bitops/hweight.h> | ||
504 | #include <asm-generic/bitops/fls64.h> | ||
505 | #include <asm-generic/bitops/sched.h> | ||
506 | #include <asm-generic/bitops/lock.h> | ||
507 | |||
508 | #include <asm-generic/bitops/find.h> | ||
509 | #include <asm-generic/bitops/le.h> | ||
510 | #include <asm-generic/bitops/ext2-atomic-setbit.h> | ||
511 | |||
512 | #endif /* !__ASSEMBLY__ */ | ||
513 | |||
514 | #endif /* __KERNEL__ */ | ||
515 | |||
516 | #endif | ||