Diffstat (limited to 'include/asm-m68k')
-rw-r--r--	include/asm-m68k/bitops.h	86
1 file changed, 5 insertions(+), 81 deletions(-)
diff --git a/include/asm-m68k/bitops.h b/include/asm-m68k/bitops.h
index b7955b39d963..e845daac48ad 100644
--- a/include/asm-m68k/bitops.h
+++ b/include/asm-m68k/bitops.h
@@ -310,36 +310,10 @@ static inline int fls(int x)
 
 	return 32 - cnt;
 }
-#define fls64(x)   generic_fls64(x)
-
-/*
- * Every architecture must define this function. It's the fastest
- * way of searching a 140-bit bitmap where the first 100 bits are
- * unlikely to be set. It's guaranteed that at least one of the 140
- * bits is cleared.
- */
-static inline int sched_find_first_bit(const unsigned long *b)
-{
-	if (unlikely(b[0]))
-		return __ffs(b[0]);
-	if (unlikely(b[1]))
-		return __ffs(b[1]) + 32;
-	if (unlikely(b[2]))
-		return __ffs(b[2]) + 64;
-	if (b[3])
-		return __ffs(b[3]) + 96;
-	return __ffs(b[4]) + 128;
-}
 
-
-/*
- * hweightN: returns the hamming weight (i.e. the number
- * of bits set) of a N-bit word
- */
-
-#define hweight32(x) generic_hweight32(x)
-#define hweight16(x) generic_hweight16(x)
-#define hweight8(x) generic_hweight8(x)
+#include <asm-generic/bitops/fls64.h>
+#include <asm-generic/bitops/sched.h>
+#include <asm-generic/bitops/hweight.h>
 
 /* Bitmap functions for the minix filesystem */
 
@@ -377,61 +351,11 @@ static inline int minix_test_bit(int nr, const void *vaddr)
 
 /* Bitmap functions for the ext2 filesystem. */
 
-#define ext2_set_bit(nr, addr)			__test_and_set_bit((nr) ^ 24, (unsigned long *)(addr))
+#include <asm-generic/bitops/ext2-non-atomic.h>
+
 #define ext2_set_bit_atomic(lock, nr, addr)	test_and_set_bit((nr) ^ 24, (unsigned long *)(addr))
-#define ext2_clear_bit(nr, addr)		__test_and_clear_bit((nr) ^ 24, (unsigned long *)(addr))
 #define ext2_clear_bit_atomic(lock, nr, addr)	test_and_clear_bit((nr) ^ 24, (unsigned long *)(addr))
 
-static inline int ext2_test_bit(int nr, const void *vaddr)
-{
-	const unsigned char *p = vaddr;
-	return (p[nr >> 3] & (1U << (nr & 7))) != 0;
-}
-
-static inline int ext2_find_first_zero_bit(const void *vaddr, unsigned size)
-{
-	const unsigned long *p = vaddr, *addr = vaddr;
-	int res;
-
-	if (!size)
-		return 0;
-
-	size = (size >> 5) + ((size & 31) > 0);
-	while (*p++ == ~0UL)
-	{
-		if (--size == 0)
-			return (p - addr) << 5;
-	}
-
-	--p;
-	for (res = 0; res < 32; res++)
-		if (!ext2_test_bit (res, p))
-			break;
-	return (p - addr) * 32 + res;
-}
-
-static inline int ext2_find_next_zero_bit(const void *vaddr, unsigned size,
-					  unsigned offset)
-{
-	const unsigned long *addr = vaddr;
-	const unsigned long *p = addr + (offset >> 5);
-	int bit = offset & 31UL, res;
-
-	if (offset >= size)
-		return size;
-
-	if (bit) {
-		/* Look for zero in first longword */
-		for (res = bit; res < 32; res++)
-			if (!ext2_test_bit (res, p))
-				return (p - addr) * 32 + res;
-		p++;
-	}
-	/* No zero yet, search remaining full bytes for a zero */
-	res = ext2_find_first_zero_bit (p, size - 32 * (p - addr));
-	return (p - addr) * 32 + res;
-}
-
 #endif /* __KERNEL__ */
 
 #endif /* _M68K_BITOPS_H */
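
Note on the (nr) ^ 24 adjustment that survives in the ext2_*_bit_atomic macros above: ext2 stores its bitmaps as a little-endian byte stream, while the m68k bit operations act on big-endian 32-bit longs, and XOR-ing the bit index with 24 reverses the byte position inside the long (0<->3, 1<->2) without changing the bit position inside the byte. The user-space sketch below is not part of the patch; its helper names are invented for illustration, and it only checks that the index mapping picks the same bit in memory for all 32 positions of one long. The generic little-endian helpers pulled in through ext2-non-atomic.h are expected to apply an equivalent swizzle on big-endian configurations, which is why the open-coded ext2 routines could be dropped.

/*
 * Standalone sketch (not kernel code): why the m68k ext2 helpers pass
 * (nr) ^ 24 to the native long-word bitops.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Little-endian (on-disk ext2) view: bit nr lives in byte nr/8, bit nr%8. */
static void set_bit_le_bytes(uint8_t *bytes, unsigned nr)
{
	bytes[nr >> 3] |= 1u << (nr & 7);
}

/*
 * Big-endian long-word view: set bit nr (LSB = bit 0) of a 32-bit word,
 * then store the word most-significant byte first, as m68k does.
 */
static void set_bit_be_long(uint8_t *bytes, unsigned nr)
{
	uint32_t w = ((uint32_t)bytes[0] << 24) | ((uint32_t)bytes[1] << 16) |
		     ((uint32_t)bytes[2] << 8)  |  (uint32_t)bytes[3];

	w |= 1u << (nr & 31);
	bytes[0] = w >> 24;
	bytes[1] = w >> 16;
	bytes[2] = w >> 8;
	bytes[3] = w;
}

int main(void)
{
	for (unsigned nr = 0; nr < 32; nr++) {
		uint8_t a[4] = { 0 }, b[4] = { 0 };

		set_bit_le_bytes(a, nr);	/* what ext2 expects on disk */
		set_bit_be_long(b, nr ^ 24);	/* what the m68k macro does  */
		assert(memcmp(a, b, sizeof(a)) == 0);
	}
	puts("nr ^ 24 maps little-endian bitmap bits onto big-endian longs");
	return 0;
}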