author    Linus Torvalds <torvalds@linux-foundation.org>  2012-03-28 20:19:27 -0400
committer Linus Torvalds <torvalds@linux-foundation.org>  2012-03-28 20:19:28 -0400
commit    532bfc851a7475fb6a36c1e953aa395798a7cca7 (patch)
tree      a7892e5a31330dd59f31959efbe9fda1803784fd /lib
parent    0195c00244dc2e9f522475868fa278c473ba7339 (diff)
parent    8da00edc1069f01c34510fa405dc15d96c090a3f (diff)
Merge branch 'akpm' (Andrew's patch-bomb)
Merge third batch of patches from Andrew Morton:
- Some MM stragglers
- core SMP library cleanups (on_each_cpu_mask)
- Some IPI optimisations
- kexec
- kdump
- IPMI
- the radix-tree iterator work
- various other misc bits.
"That'll do for -rc1. I still have ~10 patches for 3.4, will send
those along when they've baked a little more."
* emailed from Andrew Morton <akpm@linux-foundation.org>: (35 commits)
backlight: fix typo in tosa_lcd.c
crc32: add help text for the algorithm select option
mm: move hugepage test examples to tools/testing/selftests/vm
mm: move slabinfo.c to tools/vm
mm: move page-types.c from Documentation to tools/vm
selftests/Makefile: make `run_tests' depend on `all'
selftests: launch individual selftests from the main Makefile
radix-tree: use iterators in find_get_pages* functions
radix-tree: rewrite gang lookup using iterator
radix-tree: introduce bit-optimized iterator
fs/proc/namespaces.c: prevent crash when ns_entries[] is empty
nbd: rename the nbd_device variable from lo to nbd
pidns: add reboot_pid_ns() to handle the reboot syscall
sysctl: use bitmap library functions
ipmi: use locks on watchdog timeout set on reboot
ipmi: simplify locking
ipmi: fix message handling during panics
ipmi: use a tasklet for handling received messages
ipmi: increase KCS timeouts
ipmi: decrease the IPMI message transaction time in interrupt mode
...
Diffstat (limited to 'lib')
-rw-r--r--  lib/Kconfig      |   4
-rw-r--r--  lib/cpumask.c    |  12
-rw-r--r--  lib/radix-tree.c | 442
3 files changed, 188 insertions(+), 270 deletions(-)
diff --git a/lib/Kconfig b/lib/Kconfig
index a0e5900a9d85..4a8aba2e5cc0 100644
--- a/lib/Kconfig
+++ b/lib/Kconfig
@@ -88,6 +88,10 @@ choice
 	prompt "CRC32 implementation"
 	depends on CRC32
 	default CRC32_SLICEBY8
+	help
+	  This option allows a kernel builder to override the default choice
+	  of CRC32 algorithm.  Choose the default ("slice by 8") unless you
+	  know that you need one of the others.
 
 config CRC32_SLICEBY8
 	bool "Slice by 8 bytes"
diff --git a/lib/cpumask.c b/lib/cpumask.c
index 0b660118ed91..402a54ac35cb 100644
--- a/lib/cpumask.c
+++ b/lib/cpumask.c
@@ -26,18 +26,6 @@ int __next_cpu_nr(int n, const cpumask_t *srcp)
 EXPORT_SYMBOL(__next_cpu_nr);
 #endif
 
-int __any_online_cpu(const cpumask_t *mask)
-{
-	int cpu;
-
-	for_each_cpu(cpu, mask) {
-		if (cpu_online(cpu))
-			break;
-	}
-	return cpu;
-}
-EXPORT_SYMBOL(__any_online_cpu);
-
 /**
  * cpumask_next_and - get the next cpu in *src1p & *src2p
  * @n: the cpu prior to the place to search (ie. return will be > @n)
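The helper removed above backed the old any_online_cpu() macro; its only
job was "first online CPU in *mask". Its remaining users were presumably
converted to the stock cpumask API, so an equivalent call (a sketch, not
taken from this diff) looks like:

	/* First CPU that is both set in *mask and online;
	 * returns >= nr_cpu_ids when no such CPU exists. */
	unsigned int cpu = cpumask_any_and(mask, cpu_online_mask);

	if (cpu >= nr_cpu_ids)
		/* no online CPU in the mask */;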
diff --git a/lib/radix-tree.c b/lib/radix-tree.c
index 3e69c2b66c94..86516f5588e3 100644
--- a/lib/radix-tree.c
+++ b/lib/radix-tree.c
@@ -3,6 +3,7 @@
  * Portions Copyright (C) 2001 Christoph Hellwig
  * Copyright (C) 2005 SGI, Christoph Lameter
  * Copyright (C) 2006 Nick Piggin
+ * Copyright (C) 2012 Konstantin Khlebnikov
  *
  * This program is free software; you can redistribute it and/or
  * modify it under the terms of the GNU General Public License as
@@ -146,6 +147,43 @@ static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
 	}
 	return 0;
 }
+
+/**
+ * radix_tree_find_next_bit - find the next set bit in a memory region
+ *
+ * @addr: The address to base the search on
+ * @size: The bitmap size in bits
+ * @offset: The bitnumber to start searching at
+ *
+ * Unrollable variant of find_next_bit() for constant size arrays.
+ * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
+ * Returns next bit offset, or size if nothing found.
+ */
+static __always_inline unsigned long
+radix_tree_find_next_bit(const unsigned long *addr,
+			 unsigned long size, unsigned long offset)
+{
+	if (!__builtin_constant_p(size))
+		return find_next_bit(addr, size, offset);
+
+	if (offset < size) {
+		unsigned long tmp;
+
+		addr += offset / BITS_PER_LONG;
+		tmp = *addr >> (offset % BITS_PER_LONG);
+		if (tmp)
+			return __ffs(tmp) + offset;
+		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
+		while (offset < size) {
+			tmp = *++addr;
+			if (tmp)
+				return __ffs(tmp) + offset;
+			offset += BITS_PER_LONG;
+		}
+	}
+	return size;
+}
+
 /*
  * This assumes that the caller has performed appropriate preallocation, and
  * that the caller has pinned this thread of control to the current CPU.
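The callers added below always pass RADIX_TREE_MAP_SIZE, a compile-time
constant, so the __builtin_constant_p() test selects the open-coded path
and the word-by-word loop can be fully unrolled. A minimal sketch of the
contract (hypothetical values, assuming 64-bit longs):

	/* two 64-bit words; only bit 67 (word 1, bit 3) is set,
	 * and all tail bits beyond size are zero as required */
	static const unsigned long bits[2] = { 0UL, 1UL << 3 };

	radix_tree_find_next_bit(bits, 128, 0);   /* returns 67 */
	radix_tree_find_next_bit(bits, 128, 68);  /* none left: returns 128, i.e. size */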
@@ -613,6 +651,119 @@ int radix_tree_tag_get(struct radix_tree_root *root,
 EXPORT_SYMBOL(radix_tree_tag_get);
 
 /**
+ * radix_tree_next_chunk - find next chunk of slots for iteration
+ *
+ * @root:	radix tree root
+ * @iter:	iterator state
+ * @flags:	RADIX_TREE_ITER_* flags and tag index
+ * Returns:	pointer to chunk first slot, or NULL if iteration is over
+ */
+void **radix_tree_next_chunk(struct radix_tree_root *root,
+			     struct radix_tree_iter *iter, unsigned flags)
+{
+	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
+	struct radix_tree_node *rnode, *node;
+	unsigned long index, offset;
+
+	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
+		return NULL;
+
+	/*
+	 * Catch next_index overflow after ~0UL. iter->index never overflows
+	 * during iterating; it can be zero only at the beginning.
+	 * And we cannot overflow iter->next_index in a single step,
+	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
+	 */
+	index = iter->next_index;
+	if (!index && iter->index)
+		return NULL;
+
+	rnode = rcu_dereference_raw(root->rnode);
+	if (radix_tree_is_indirect_ptr(rnode)) {
+		rnode = indirect_to_ptr(rnode);
+	} else if (rnode && !index) {
+		/* Single-slot tree */
+		iter->index = 0;
+		iter->next_index = 1;
+		iter->tags = 1;
+		return (void **)&root->rnode;
+	} else
+		return NULL;
+
+restart:
+	shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
+	offset = index >> shift;
+
+	/* Index outside of the tree */
+	if (offset >= RADIX_TREE_MAP_SIZE)
+		return NULL;
+
+	node = rnode;
+	while (1) {
+		if ((flags & RADIX_TREE_ITER_TAGGED) ?
+				!test_bit(offset, node->tags[tag]) :
+				!node->slots[offset]) {
+			/* Hole detected */
+			if (flags & RADIX_TREE_ITER_CONTIG)
+				return NULL;
+
+			if (flags & RADIX_TREE_ITER_TAGGED)
+				offset = radix_tree_find_next_bit(
+						node->tags[tag],
+						RADIX_TREE_MAP_SIZE,
+						offset + 1);
+			else
+				while (++offset < RADIX_TREE_MAP_SIZE) {
+					if (node->slots[offset])
+						break;
+				}
+			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
+			index += offset << shift;
+			/* Overflow after ~0UL */
+			if (!index)
+				return NULL;
+			if (offset == RADIX_TREE_MAP_SIZE)
+				goto restart;
+		}
+
+		/* This is leaf-node */
+		if (!shift)
+			break;
+
+		node = rcu_dereference_raw(node->slots[offset]);
+		if (node == NULL)
+			goto restart;
+		shift -= RADIX_TREE_MAP_SHIFT;
+		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
+	}
+
+	/* Update the iterator state */
+	iter->index = index;
+	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;
+
+	/* Construct iter->tags bit-mask from node->tags[tag] array */
+	if (flags & RADIX_TREE_ITER_TAGGED) {
+		unsigned tag_long, tag_bit;
+
+		tag_long = offset / BITS_PER_LONG;
+		tag_bit  = offset % BITS_PER_LONG;
+		iter->tags = node->tags[tag][tag_long] >> tag_bit;
+		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
+		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
+			/* Pick tags from next element */
+			if (tag_bit)
+				iter->tags |= node->tags[tag][tag_long + 1] <<
+						(BITS_PER_LONG - tag_bit);
+			/* Clip chunk size, here only BITS_PER_LONG tags */
+			iter->next_index = index + BITS_PER_LONG;
+		}
+	}
+
+	return node->slots + offset;
+}
+EXPORT_SYMBOL(radix_tree_next_chunk);
+
+/**
  * radix_tree_range_tag_if_tagged - for each item in given range set given
  *		tag if item has another tag set
  * @root:		radix tree root
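radix_tree_next_chunk() is not meant to be called directly: the companion
include/linux/radix-tree.h change in this series wraps it in iterator
macros (radix_tree_for_each_slot() and friends) that walk each returned
chunk slot by slot. A hedged consumer sketch (the item handling and
pr_debug() call are illustrative; lockless walks must run under
rcu_read_lock()):

	struct radix_tree_iter iter;
	void **slot;

	rcu_read_lock();
	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		void *item = rcu_dereference_raw(*slot);

		/* iter.index is the index of the current slot */
		pr_debug("index %lu -> %p\n", iter.index, item);
	}
	rcu_read_unlock();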
@@ -817,57 +968,6 @@ unsigned long radix_tree_prev_hole(struct radix_tree_root *root,
 }
 EXPORT_SYMBOL(radix_tree_prev_hole);
 
-static unsigned int
-__lookup(struct radix_tree_node *slot, void ***results, unsigned long *indices,
-	unsigned long index, unsigned int max_items, unsigned long *next_index)
-{
-	unsigned int nr_found = 0;
-	unsigned int shift, height;
-	unsigned long i;
-
-	height = slot->height;
-	if (height == 0)
-		goto out;
-	shift = (height-1) * RADIX_TREE_MAP_SHIFT;
-
-	for ( ; height > 1; height--) {
-		i = (index >> shift) & RADIX_TREE_MAP_MASK;
-		for (;;) {
-			if (slot->slots[i] != NULL)
-				break;
-			index &= ~((1UL << shift) - 1);
-			index += 1UL << shift;
-			if (index == 0)
-				goto out;	/* 32-bit wraparound */
-			i++;
-			if (i == RADIX_TREE_MAP_SIZE)
-				goto out;
-		}
-
-		shift -= RADIX_TREE_MAP_SHIFT;
-		slot = rcu_dereference_raw(slot->slots[i]);
-		if (slot == NULL)
-			goto out;
-	}
-
-	/* Bottom level: grab some items */
-	for (i = index & RADIX_TREE_MAP_MASK; i < RADIX_TREE_MAP_SIZE; i++) {
-		if (slot->slots[i]) {
-			results[nr_found] = &(slot->slots[i]);
-			if (indices)
-				indices[nr_found] = index;
-			if (++nr_found == max_items) {
-				index++;
-				goto out;
-			}
-		}
-		index++;
-	}
-out:
-	*next_index = index;
-	return nr_found;
-}
-
 /**
  * radix_tree_gang_lookup - perform multiple lookup on a radix tree
  * @root:		radix tree root
@@ -891,48 +991,19 @@ unsigned int
 radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
 			unsigned long first_index, unsigned int max_items)
 {
-	unsigned long max_index;
-	struct radix_tree_node *node;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;
 
-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;
 
-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = node;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int nr_found, slots_found, i;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
-			break;
-		slots_found = __lookup(node, (void ***)results + ret, NULL,
-				cur_index, max_items - ret, &next_index);
-		nr_found = 0;
-		for (i = 0; i < slots_found; i++) {
-			struct radix_tree_node *slot;
-			slot = *(((void ***)results)[ret + i]);
-			if (!slot)
-				continue;
-			results[ret + nr_found] =
-				indirect_to_ptr(rcu_dereference_raw(slot));
-			nr_found++;
-		}
-		ret += nr_found;
-		if (next_index == 0)
+	radix_tree_for_each_slot(slot, root, &iter, first_index) {
+		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
+		if (!results[ret])
+			continue;
+		if (++ret == max_items)
 			break;
-		cur_index = next_index;
 	}
 
 	return ret;
@@ -962,112 +1033,25 @@ radix_tree_gang_lookup_slot(struct radix_tree_root *root,
 			void ***results, unsigned long *indices,
 			unsigned long first_index, unsigned int max_items)
 {
-	unsigned long max_index;
-	struct radix_tree_node *node;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;
 
-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;
 
-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = (void **)&root->rnode;
+	radix_tree_for_each_slot(slot, root, &iter, first_index) {
+		results[ret] = slot;
 		if (indices)
-			indices[0] = 0;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int slots_found;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
+			indices[ret] = iter.index;
+		if (++ret == max_items)
 			break;
-		slots_found = __lookup(node, results + ret,
-				indices ? indices + ret : NULL,
-				cur_index, max_items - ret, &next_index);
-		ret += slots_found;
-		if (next_index == 0)
-			break;
-		cur_index = next_index;
 	}
 
 	return ret;
 }
 EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
 
-/*
- * FIXME: the two tag_get()s here should use find_next_bit() instead of
- * open-coding the search.
- */
-static unsigned int
-__lookup_tag(struct radix_tree_node *slot, void ***results, unsigned long index,
-	unsigned int max_items, unsigned long *next_index, unsigned int tag)
-{
-	unsigned int nr_found = 0;
-	unsigned int shift, height;
-
-	height = slot->height;
-	if (height == 0)
-		goto out;
-	shift = (height-1) * RADIX_TREE_MAP_SHIFT;
-
-	while (height > 0) {
-		unsigned long i = (index >> shift) & RADIX_TREE_MAP_MASK ;
-
-		for (;;) {
-			if (tag_get(slot, tag, i))
-				break;
-			index &= ~((1UL << shift) - 1);
-			index += 1UL << shift;
-			if (index == 0)
-				goto out;	/* 32-bit wraparound */
-			i++;
-			if (i == RADIX_TREE_MAP_SIZE)
-				goto out;
-		}
-		height--;
-		if (height == 0) {	/* Bottom level: grab some items */
-			unsigned long j = index & RADIX_TREE_MAP_MASK;
-
-			for ( ; j < RADIX_TREE_MAP_SIZE; j++) {
-				index++;
-				if (!tag_get(slot, tag, j))
-					continue;
-				/*
-				 * Even though the tag was found set, we need to
-				 * recheck that we have a non-NULL node, because
-				 * if this lookup is lockless, it may have been
-				 * subsequently deleted.
-				 *
-				 * Similar care must be taken in any place that
-				 * lookup ->slots[x] without a lock (ie. can't
-				 * rely on its value remaining the same).
-				 */
-				if (slot->slots[j]) {
-					results[nr_found++] = &(slot->slots[j]);
-					if (nr_found == max_items)
-						goto out;
-				}
-			}
-		}
-		shift -= RADIX_TREE_MAP_SHIFT;
-		slot = rcu_dereference_raw(slot->slots[i]);
-		if (slot == NULL)
-			break;
-	}
-out:
-	*next_index = index;
-	return nr_found;
-}
-
 /**
  * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
  *				based on a tag
@@ -1086,52 +1070,19 @@ radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
 		unsigned long first_index, unsigned int max_items,
 		unsigned int tag)
 {
-	struct radix_tree_node *node;
-	unsigned long max_index;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
-
-	/* check the root's tag bit */
-	if (!root_tag_get(root, tag))
-		return 0;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;
 
-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;
 
-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = node;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int nr_found, slots_found, i;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
-			break;
-		slots_found = __lookup_tag(node, (void ***)results + ret,
-				cur_index, max_items - ret, &next_index, tag);
-		nr_found = 0;
-		for (i = 0; i < slots_found; i++) {
-			struct radix_tree_node *slot;
-			slot = *(((void ***)results)[ret + i]);
-			if (!slot)
-				continue;
-			results[ret + nr_found] =
-				indirect_to_ptr(rcu_dereference_raw(slot));
-			nr_found++;
-		}
-		ret += nr_found;
-		if (next_index == 0)
+	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
+		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
+		if (!results[ret])
+			continue;
+		if (++ret == max_items)
 			break;
-		cur_index = next_index;
 	}
 
 	return ret;
@@ -1156,42 +1107,17 @@ radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
 		unsigned long first_index, unsigned int max_items,
 		unsigned int tag)
 {
-	struct radix_tree_node *node;
-	unsigned long max_index;
-	unsigned long cur_index = first_index;
-	unsigned int ret;
+	struct radix_tree_iter iter;
+	void **slot;
+	unsigned int ret = 0;
 
-	/* check the root's tag bit */
-	if (!root_tag_get(root, tag))
-		return 0;
-
-	node = rcu_dereference_raw(root->rnode);
-	if (!node)
+	if (unlikely(!max_items))
 		return 0;
 
-	if (!radix_tree_is_indirect_ptr(node)) {
-		if (first_index > 0)
-			return 0;
-		results[0] = (void **)&root->rnode;
-		return 1;
-	}
-	node = indirect_to_ptr(node);
-
-	max_index = radix_tree_maxindex(node->height);
-
-	ret = 0;
-	while (ret < max_items) {
-		unsigned int slots_found;
-		unsigned long next_index;	/* Index of next search */
-
-		if (cur_index > max_index)
+	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
+		results[ret] = slot;
+		if (++ret == max_items)
 			break;
-		slots_found = __lookup_tag(node, results + ret,
-				cur_index, max_items - ret, &next_index, tag);
-		ret += slots_found;
-		if (next_index == 0)
-			break;
-		cur_index = next_index;
 	}
 
 	return ret;
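
The external API of all four gang lookups is unchanged by this rewrite;
only the internals now ride on radix_tree_next_chunk(). A hedged
caller-side sketch (the batch size is illustrative and process() is a
hypothetical consumer; note radix_tree_gang_lookup() returns items, with
indirect_to_ptr() already applied, not slot pointers):

	void *batch[16];
	unsigned int i, found;

	rcu_read_lock();
	found = radix_tree_gang_lookup(root, batch, first_index, 16);
	rcu_read_unlock();

	for (i = 0; i < found; i++)
		process(batch[i]);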