diff options
-rw-r--r-- | include/linux/radix-tree.h | 196 | ||||
-rw-r--r-- | lib/radix-tree.c | 151 |
2 files changed, 347 insertions, 0 deletions
diff --git a/include/linux/radix-tree.h b/include/linux/radix-tree.h index e9a48234e693..0d04cd69ab9b 100644 --- a/include/linux/radix-tree.h +++ b/include/linux/radix-tree.h | |||
@@ -2,6 +2,7 @@ | |||
2 | * Copyright (C) 2001 Momchil Velikov | 2 | * Copyright (C) 2001 Momchil Velikov |
3 | * Portions Copyright (C) 2001 Christoph Hellwig | 3 | * Portions Copyright (C) 2001 Christoph Hellwig |
4 | * Copyright (C) 2006 Nick Piggin | 4 | * Copyright (C) 2006 Nick Piggin |
5 | * Copyright (C) 2012 Konstantin Khlebnikov | ||
5 | * | 6 | * |
6 | * This program is free software; you can redistribute it and/or | 7 | * This program is free software; you can redistribute it and/or |
7 | * modify it under the terms of the GNU General Public License as | 8 | * modify it under the terms of the GNU General Public License as |
@@ -257,4 +258,199 @@ static inline void radix_tree_preload_end(void) | |||
257 | preempt_enable(); | 258 | preempt_enable(); |
258 | } | 259 | } |
259 | 260 | ||
261 | /** | ||
262 | * struct radix_tree_iter - radix tree iterator state | ||
263 | * | ||
264 | * @index: index of current slot | ||
265 | * @next_index: next-to-last index for this chunk | ||
266 | * @tags: bit-mask for tag-iterating | ||
267 | * | ||
268 | * This radix tree iterator works in terms of "chunks" of slots. A chunk is a | ||
269 | * subinterval of slots contained within one radix tree leaf node. It is | ||
270 | * described by a pointer to its first slot and a struct radix_tree_iter | ||
271 | * which holds the chunk's position in the tree and its size. For tagged | ||
272 | * iteration radix_tree_iter also holds the slots' bit-mask for one chosen | ||
273 | * radix tree tag. | ||
274 | */ | ||
275 | struct radix_tree_iter { | ||
276 | unsigned long index; | ||
277 | unsigned long next_index; | ||
278 | unsigned long tags; | ||
279 | }; | ||
280 | |||
281 | #define RADIX_TREE_ITER_TAG_MASK 0x00FF /* tag index in lower byte */ | ||
282 | #define RADIX_TREE_ITER_TAGGED 0x0100 /* lookup tagged slots */ | ||
283 | #define RADIX_TREE_ITER_CONTIG 0x0200 /* stop at first hole */ | ||
284 | |||
285 | /** | ||
286 | * radix_tree_iter_init - initialize radix tree iterator | ||
287 | * | ||
288 | * @iter: pointer to iterator state | ||
289 | * @start: iteration starting index | ||
290 | * Returns: NULL | ||
291 | */ | ||
292 | static __always_inline void ** | ||
293 | radix_tree_iter_init(struct radix_tree_iter *iter, unsigned long start) | ||
294 | { | ||
295 | /* | ||
296 | * Leave iter->tags uninitialized. radix_tree_next_chunk() will fill it | ||
297 | * in the case of a successful tagged chunk lookup. If the lookup was | ||
298 | * unsuccessful or non-tagged then nobody cares about ->tags. | ||
299 | * | ||
300 | * Set index to zero to bypass next_index overflow protection. | ||
301 | * See the comment in radix_tree_next_chunk() for details. | ||
302 | */ | ||
303 | iter->index = 0; | ||
304 | iter->next_index = start; | ||
305 | return NULL; | ||
306 | } | ||
307 | |||
308 | /** | ||
309 | * radix_tree_next_chunk - find next chunk of slots for iteration | ||
310 | * | ||
311 | * @root: radix tree root | ||
312 | * @iter: iterator state | ||
313 | * @flags: RADIX_TREE_ITER_* flags and tag index | ||
314 | * Returns: pointer to chunk first slot, or NULL if there are no more left | ||
315 | * | ||
316 | * This function looks up the next chunk in the radix tree starting from | ||
317 | * @iter->next_index. It returns a pointer to the chunk's first slot. | ||
318 | * Also it fills @iter with data about chunk: position in the tree (index), | ||
319 | * its end (next_index), and constructs a bit mask for tagged iterating (tags). | ||
320 | */ | ||
321 | void **radix_tree_next_chunk(struct radix_tree_root *root, | ||
322 | struct radix_tree_iter *iter, unsigned flags); | ||
323 | |||
324 | /** | ||
325 | * radix_tree_chunk_size - get current chunk size | ||
326 | * | ||
327 | * @iter: pointer to radix tree iterator | ||
328 | * Returns: current chunk size | ||
329 | */ | ||
330 | static __always_inline unsigned | ||
331 | radix_tree_chunk_size(struct radix_tree_iter *iter) | ||
332 | { | ||
333 | return iter->next_index - iter->index; | ||
334 | } | ||
335 | |||
336 | /** | ||
337 | * radix_tree_next_slot - find next slot in chunk | ||
338 | * | ||
339 | * @slot: pointer to current slot | ||
340 | * @iter: pointer to iterator state | ||
341 | * @flags: RADIX_TREE_ITER_*, should be constant | ||
342 | * Returns: pointer to next slot, or NULL if there are no more left | ||
343 | * | ||
344 | * This function updates @iter->index in the case of a successful lookup. | ||
345 | * For tagged lookup it also eats @iter->tags. | ||
346 | */ | ||
347 | static __always_inline void ** | ||
348 | radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags) | ||
349 | { | ||
350 | if (flags & RADIX_TREE_ITER_TAGGED) { | ||
351 | iter->tags >>= 1; | ||
352 | if (likely(iter->tags & 1ul)) { | ||
353 | iter->index++; | ||
354 | return slot + 1; | ||
355 | } | ||
356 | if (!(flags & RADIX_TREE_ITER_CONTIG) && likely(iter->tags)) { | ||
357 | unsigned offset = __ffs(iter->tags); | ||
358 | |||
359 | iter->tags >>= offset; | ||
360 | iter->index += offset + 1; | ||
361 | return slot + offset + 1; | ||
362 | } | ||
363 | } else { | ||
364 | unsigned size = radix_tree_chunk_size(iter) - 1; | ||
365 | |||
366 | while (size--) { | ||
367 | slot++; | ||
368 | iter->index++; | ||
369 | if (likely(*slot)) | ||
370 | return slot; | ||
371 | if (flags & RADIX_TREE_ITER_CONTIG) | ||
372 | break; | ||
373 | } | ||
374 | } | ||
375 | return NULL; | ||
376 | } | ||
377 | |||
378 | /** | ||
379 | * radix_tree_for_each_chunk - iterate over chunks | ||
380 | * | ||
381 | * @slot: the void** variable for pointer to chunk first slot | ||
382 | * @root: the struct radix_tree_root pointer | ||
383 | * @iter: the struct radix_tree_iter pointer | ||
384 | * @start: iteration starting index | ||
385 | * @flags: RADIX_TREE_ITER_* and tag index | ||
386 | * | ||
387 | * Locks can be released and reacquired between iterations. | ||
388 | */ | ||
389 | #define radix_tree_for_each_chunk(slot, root, iter, start, flags) \ | ||
390 | for (slot = radix_tree_iter_init(iter, start) ; \ | ||
391 | (slot = radix_tree_next_chunk(root, iter, flags)) ;) | ||
392 | |||
393 | /** | ||
394 | * radix_tree_for_each_chunk_slot - iterate over slots in one chunk | ||
395 | * | ||
396 | * @slot: the void** variable, at the beginning points to chunk first slot | ||
397 | * @iter: the struct radix_tree_iter pointer | ||
398 | * @flags: RADIX_TREE_ITER_*, should be constant | ||
399 | * | ||
400 | * This macro is designed to be nested inside radix_tree_for_each_chunk(). | ||
401 | * @slot points to the radix tree slot, @iter->index contains its index. | ||
402 | */ | ||
403 | #define radix_tree_for_each_chunk_slot(slot, iter, flags) \ | ||
404 | for (; slot ; slot = radix_tree_next_slot(slot, iter, flags)) | ||
405 | |||
406 | /** | ||
407 | * radix_tree_for_each_slot - iterate over non-empty slots | ||
408 | * | ||
409 | * @slot: the void** variable for pointer to slot | ||
410 | * @root: the struct radix_tree_root pointer | ||
411 | * @iter: the struct radix_tree_iter pointer | ||
412 | * @start: iteration starting index | ||
413 | * | ||
414 | * @slot points to radix tree slot, @iter->index contains its index. | ||
415 | */ | ||
416 | #define radix_tree_for_each_slot(slot, root, iter, start) \ | ||
417 | for (slot = radix_tree_iter_init(iter, start) ; \ | ||
418 | slot || (slot = radix_tree_next_chunk(root, iter, 0)) ; \ | ||
419 | slot = radix_tree_next_slot(slot, iter, 0)) | ||
420 | |||
421 | /** | ||
422 | * radix_tree_for_each_contig - iterate over contiguous slots | ||
423 | * | ||
424 | * @slot: the void** variable for pointer to slot | ||
425 | * @root: the struct radix_tree_root pointer | ||
426 | * @iter: the struct radix_tree_iter pointer | ||
427 | * @start: iteration starting index | ||
428 | * | ||
429 | * @slot points to radix tree slot, @iter->index contains its index. | ||
430 | */ | ||
431 | #define radix_tree_for_each_contig(slot, root, iter, start) \ | ||
432 | for (slot = radix_tree_iter_init(iter, start) ; \ | ||
433 | slot || (slot = radix_tree_next_chunk(root, iter, \ | ||
434 | RADIX_TREE_ITER_CONTIG)) ; \ | ||
435 | slot = radix_tree_next_slot(slot, iter, \ | ||
436 | RADIX_TREE_ITER_CONTIG)) | ||
437 | |||
438 | /** | ||
439 | * radix_tree_for_each_tagged - iterate over tagged slots | ||
440 | * | ||
441 | * @slot: the void** variable for pointer to slot | ||
442 | * @root: the struct radix_tree_root pointer | ||
443 | * @iter: the struct radix_tree_iter pointer | ||
444 | * @start: iteration starting index | ||
445 | * @tag: tag index | ||
446 | * | ||
447 | * @slot points to radix tree slot, @iter->index contains its index. | ||
448 | */ | ||
449 | #define radix_tree_for_each_tagged(slot, root, iter, start, tag) \ | ||
450 | for (slot = radix_tree_iter_init(iter, start) ; \ | ||
451 | slot || (slot = radix_tree_next_chunk(root, iter, \ | ||
452 | RADIX_TREE_ITER_TAGGED | tag)) ; \ | ||
453 | slot = radix_tree_next_slot(slot, iter, \ | ||
454 | RADIX_TREE_ITER_TAGGED)) | ||
455 | |||
260 | #endif /* _LINUX_RADIX_TREE_H */ | 456 | #endif /* _LINUX_RADIX_TREE_H */ |
diff --git a/lib/radix-tree.c b/lib/radix-tree.c index 3e69c2b66c94..fefa76e6ff96 100644 --- a/lib/radix-tree.c +++ b/lib/radix-tree.c | |||
@@ -3,6 +3,7 @@ | |||
3 | * Portions Copyright (C) 2001 Christoph Hellwig | 3 | * Portions Copyright (C) 2001 Christoph Hellwig |
4 | * Copyright (C) 2005 SGI, Christoph Lameter | 4 | * Copyright (C) 2005 SGI, Christoph Lameter |
5 | * Copyright (C) 2006 Nick Piggin | 5 | * Copyright (C) 2006 Nick Piggin |
6 | * Copyright (C) 2012 Konstantin Khlebnikov | ||
6 | * | 7 | * |
7 | * This program is free software; you can redistribute it and/or | 8 | * This program is free software; you can redistribute it and/or |
8 | * modify it under the terms of the GNU General Public License as | 9 | * modify it under the terms of the GNU General Public License as |
@@ -146,6 +147,43 @@ static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag) | |||
146 | } | 147 | } |
147 | return 0; | 148 | return 0; |
148 | } | 149 | } |
150 | |||
151 | /** | ||
152 | * radix_tree_find_next_bit - find the next set bit in a memory region | ||
153 | * | ||
154 | * @addr: The address to base the search on | ||
155 | * @size: The bitmap size in bits | ||
156 | * @offset: The bitnumber to start searching at | ||
157 | * | ||
158 | * Unrollable variant of find_next_bit() for constant size arrays. | ||
159 | * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero. | ||
160 | * Returns next bit offset, or size if nothing found. | ||
161 | */ | ||
162 | static __always_inline unsigned long | ||
163 | radix_tree_find_next_bit(const unsigned long *addr, | ||
164 | unsigned long size, unsigned long offset) | ||
165 | { | ||
166 | if (!__builtin_constant_p(size)) | ||
167 | return find_next_bit(addr, size, offset); | ||
168 | |||
169 | if (offset < size) { | ||
170 | unsigned long tmp; | ||
171 | |||
172 | addr += offset / BITS_PER_LONG; | ||
173 | tmp = *addr >> (offset % BITS_PER_LONG); | ||
174 | if (tmp) | ||
175 | return __ffs(tmp) + offset; | ||
176 | offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1); | ||
177 | while (offset < size) { | ||
178 | tmp = *++addr; | ||
179 | if (tmp) | ||
180 | return __ffs(tmp) + offset; | ||
181 | offset += BITS_PER_LONG; | ||
182 | } | ||
183 | } | ||
184 | return size; | ||
185 | } | ||
186 | |||
149 | /* | 187 | /* |
150 | * This assumes that the caller has performed appropriate preallocation, and | 188 | * This assumes that the caller has performed appropriate preallocation, and |
151 | * that the caller has pinned this thread of control to the current CPU. | 189 | * that the caller has pinned this thread of control to the current CPU. |
@@ -613,6 +651,119 @@ int radix_tree_tag_get(struct radix_tree_root *root, | |||
613 | EXPORT_SYMBOL(radix_tree_tag_get); | 651 | EXPORT_SYMBOL(radix_tree_tag_get); |
614 | 652 | ||
615 | /** | 653 | /** |
654 | * radix_tree_next_chunk - find next chunk of slots for iteration | ||
655 | * | ||
656 | * @root: radix tree root | ||
657 | * @iter: iterator state | ||
658 | * @flags: RADIX_TREE_ITER_* flags and tag index | ||
659 | * Returns: pointer to chunk first slot, or NULL if iteration is over | ||
660 | */ | ||
661 | void **radix_tree_next_chunk(struct radix_tree_root *root, | ||
662 | struct radix_tree_iter *iter, unsigned flags) | ||
663 | { | ||
664 | unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK; | ||
665 | struct radix_tree_node *rnode, *node; | ||
666 | unsigned long index, offset; | ||
667 | |||
668 | if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag)) | ||
669 | return NULL; | ||
670 | |||
671 | /* | ||
672 | * Catch next_index overflow after ~0UL. iter->index never overflows | ||
673 | * during iterating; it can be zero only at the beginning. | ||
674 | * And we cannot overflow iter->next_index in a single step, | ||
675 | * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG. | ||
676 | */ | ||
677 | index = iter->next_index; | ||
678 | if (!index && iter->index) | ||
679 | return NULL; | ||
680 | |||
681 | rnode = rcu_dereference_raw(root->rnode); | ||
682 | if (radix_tree_is_indirect_ptr(rnode)) { | ||
683 | rnode = indirect_to_ptr(rnode); | ||
684 | } else if (rnode && !index) { | ||
685 | /* Single-slot tree */ | ||
686 | iter->index = 0; | ||
687 | iter->next_index = 1; | ||
688 | iter->tags = 1; | ||
689 | return (void **)&root->rnode; | ||
690 | } else | ||
691 | return NULL; | ||
692 | |||
693 | restart: | ||
694 | shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT; | ||
695 | offset = index >> shift; | ||
696 | |||
697 | /* Index outside of the tree */ | ||
698 | if (offset >= RADIX_TREE_MAP_SIZE) | ||
699 | return NULL; | ||
700 | |||
701 | node = rnode; | ||
702 | while (1) { | ||
703 | if ((flags & RADIX_TREE_ITER_TAGGED) ? | ||
704 | !test_bit(offset, node->tags[tag]) : | ||
705 | !node->slots[offset]) { | ||
706 | /* Hole detected */ | ||
707 | if (flags & RADIX_TREE_ITER_CONTIG) | ||
708 | return NULL; | ||
709 | |||
710 | if (flags & RADIX_TREE_ITER_TAGGED) | ||
711 | offset = radix_tree_find_next_bit( | ||
712 | node->tags[tag], | ||
713 | RADIX_TREE_MAP_SIZE, | ||
714 | offset + 1); | ||
715 | else | ||
716 | while (++offset < RADIX_TREE_MAP_SIZE) { | ||
717 | if (node->slots[offset]) | ||
718 | break; | ||
719 | } | ||
720 | index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1); | ||
721 | index += offset << shift; | ||
722 | /* Overflow after ~0UL */ | ||
723 | if (!index) | ||
724 | return NULL; | ||
725 | if (offset == RADIX_TREE_MAP_SIZE) | ||
726 | goto restart; | ||
727 | } | ||
728 | |||
729 | /* This is leaf-node */ | ||
730 | if (!shift) | ||
731 | break; | ||
732 | |||
733 | node = rcu_dereference_raw(node->slots[offset]); | ||
734 | if (node == NULL) | ||
735 | goto restart; | ||
736 | shift -= RADIX_TREE_MAP_SHIFT; | ||
737 | offset = (index >> shift) & RADIX_TREE_MAP_MASK; | ||
738 | } | ||
739 | |||
740 | /* Update the iterator state */ | ||
741 | iter->index = index; | ||
742 | iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1; | ||
743 | |||
744 | /* Construct iter->tags bit-mask from node->tags[tag] array */ | ||
745 | if (flags & RADIX_TREE_ITER_TAGGED) { | ||
746 | unsigned tag_long, tag_bit; | ||
747 | |||
748 | tag_long = offset / BITS_PER_LONG; | ||
749 | tag_bit = offset % BITS_PER_LONG; | ||
750 | iter->tags = node->tags[tag][tag_long] >> tag_bit; | ||
751 | /* This never happens if RADIX_TREE_TAG_LONGS == 1 */ | ||
752 | if (tag_long < RADIX_TREE_TAG_LONGS - 1) { | ||
753 | /* Pick tags from next element */ | ||
754 | if (tag_bit) | ||
755 | iter->tags |= node->tags[tag][tag_long + 1] << | ||
756 | (BITS_PER_LONG - tag_bit); | ||
757 | /* Clip chunk size, here only BITS_PER_LONG tags */ | ||
758 | iter->next_index = index + BITS_PER_LONG; | ||
759 | } | ||
760 | } | ||
761 | |||
762 | return node->slots + offset; | ||
763 | } | ||
764 | EXPORT_SYMBOL(radix_tree_next_chunk); | ||
765 | |||
766 | /** | ||
616 | * radix_tree_range_tag_if_tagged - for each item in given range set given | 767 | * radix_tree_range_tag_if_tagged - for each item in given range set given |
617 | * tag if item has another tag set | 768 | * tag if item has another tag set |
618 | * @root: radix tree root | 769 | * @root: radix tree root |