-rw-r--r--  fs/btrfs/tests/btrfs-tests.c                 |   2
-rw-r--r--  include/linux/radix-tree.h                   |  71
-rw-r--r--  lib/radix-tree.c                             | 138
-rw-r--r--  mm/khugepaged.c                              |   7
-rw-r--r--  mm/shmem.c                                   |   6
-rw-r--r--  tools/testing/radix-tree/iteration_check.c   |  12
-rw-r--r--  tools/testing/radix-tree/multiorder.c        |  28
-rw-r--r--  tools/testing/radix-tree/regression3.c       |   8
-rw-r--r--  tools/testing/radix-tree/test.h              |   1
9 files changed, 188 insertions, 85 deletions
diff --git a/fs/btrfs/tests/btrfs-tests.c b/fs/btrfs/tests/btrfs-tests.c
index 73076a0ea6a9..00ee006a8aa2 100644
--- a/fs/btrfs/tests/btrfs-tests.c
+++ b/fs/btrfs/tests/btrfs-tests.c
@@ -162,7 +162,7 @@ void btrfs_free_dummy_fs_info(struct btrfs_fs_info *fs_info)
 				slot = radix_tree_iter_retry(&iter);
 			continue;
 		}
-		slot = radix_tree_iter_next(&iter);
+		slot = radix_tree_iter_resume(slot, &iter);
 		spin_unlock(&fs_info->buffer_lock);
 		free_extent_buffer_stale(eb);
 		spin_lock(&fs_info->buffer_lock);
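This first hunk shows the conversion every caller below repeats: radix_tree_iter_next(&iter) becomes radix_tree_iter_resume(slot, &iter), and per the new kernel-doc the call must be made before the lock protecting the tree is released, because resume now inspects the slots after the current one in order to step over sibling entries. A minimal sketch of the calling convention, in the style of the tools/testing/radix-tree userspace harness; demo_tree, demo_lock and free_entry() are hypothetical stand-ins for fs_info->buffer_radix, fs_info->buffer_lock and free_extent_buffer_stale() above:

```c
#include <linux/radix-tree.h>	/* the harness's shim of the kernel header */
#include <pthread.h>

static RADIX_TREE(demo_tree, GFP_KERNEL);
static pthread_mutex_t demo_lock = PTHREAD_MUTEX_INITIALIZER;

static void free_entry(void *entry)
{
	(void)entry;	/* hypothetical reclaim work that may sleep */
}

static void drop_all_entries(void)
{
	struct radix_tree_iter iter;
	void **slot;

	pthread_mutex_lock(&demo_lock);
	radix_tree_for_each_slot(slot, &demo_tree, &iter, 0) {
		void *entry = *slot;

		/*
		 * Resume while still holding the lock: the helper reads the
		 * slot following this one to skip sibling entries, which is
		 * only safe while the chunk is known to be valid.  It returns
		 * NULL, so the loop refetches a fresh chunk on the next pass.
		 */
		slot = radix_tree_iter_resume(slot, &iter);
		pthread_mutex_unlock(&demo_lock);
		free_entry(entry);
		pthread_mutex_lock(&demo_lock);
	}
	pthread_mutex_unlock(&demo_lock);
}
```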
diff --git a/include/linux/radix-tree.h b/include/linux/radix-tree.h
index d04073ac3a9f..289d007d487b 100644
--- a/include/linux/radix-tree.h
+++ b/include/linux/radix-tree.h
@@ -403,20 +403,17 @@ __radix_tree_iter_add(struct radix_tree_iter *iter, unsigned long slots)
 }
 
 /**
- * radix_tree_iter_next - resume iterating when the chunk may be invalid
- * @iter: iterator state
+ * radix_tree_iter_resume - resume iterating when the chunk may be invalid
+ * @slot: pointer to current slot
+ * @iter: iterator state
+ * Returns: New slot pointer
  *
  * If the iterator needs to release then reacquire a lock, the chunk may
  * have been invalidated by an insertion or deletion. Call this function
- * to continue the iteration from the next index.
+ * before releasing the lock to continue the iteration from the next index.
  */
-static inline __must_check
-void **radix_tree_iter_next(struct radix_tree_iter *iter)
-{
-	iter->next_index = __radix_tree_iter_add(iter, 1);
-	iter->tags = 0;
-	return NULL;
-}
+void **__must_check radix_tree_iter_resume(void **slot,
+					struct radix_tree_iter *iter);
 
 /**
  * radix_tree_chunk_size - get current chunk size
@@ -430,10 +427,17 @@ radix_tree_chunk_size(struct radix_tree_iter *iter)
 	return (iter->next_index - iter->index) >> iter_shift(iter);
 }
 
-static inline struct radix_tree_node *entry_to_node(void *ptr)
+#ifdef CONFIG_RADIX_TREE_MULTIORDER
+void ** __radix_tree_next_slot(void **slot, struct radix_tree_iter *iter,
+				unsigned flags);
+#else
+/* Can't happen without sibling entries, but the compiler can't tell that */
+static inline void ** __radix_tree_next_slot(void **slot,
+				struct radix_tree_iter *iter, unsigned flags)
 {
-	return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
+	return slot;
 }
+#endif
 
 /**
  * radix_tree_next_slot - find next slot in chunk
@@ -447,7 +451,7 @@ static inline struct radix_tree_node *entry_to_node(void *ptr)
  * For tagged lookup it also eats @iter->tags.
  *
  * There are several cases where 'slot' can be passed in as NULL to this
- * function. These cases result from the use of radix_tree_iter_next() or
+ * function. These cases result from the use of radix_tree_iter_resume() or
  * radix_tree_iter_retry(). In these cases we don't end up dereferencing
  * 'slot' because either:
  * a) we are doing tagged iteration and iter->tags has been set to 0, or
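The NULL-'slot' contract is worth spelling out, since everything below leans on it: after radix_tree_iter_resume(), a tagged walk finds iter->tags == 0 and returns NULL before 'slot' is ever touched, while an untagged walk sees iter->next_index equal to iter->index, so radix_tree_chunk_size() is 0 and the scanning loop body never runs (that is case (b), cut off by the hunk boundary above). The regression3.c test updated at the end of this patch exercises exactly these paths.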
@@ -458,51 +462,31 @@ static __always_inline void **
 radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
 {
 	if (flags & RADIX_TREE_ITER_TAGGED) {
-		void *canon = slot;
-
 		iter->tags >>= 1;
 		if (unlikely(!iter->tags))
 			return NULL;
-		while (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) &&
-					radix_tree_is_internal_node(slot[1])) {
-			if (entry_to_node(slot[1]) == canon) {
-				iter->tags >>= 1;
-				iter->index = __radix_tree_iter_add(iter, 1);
-				slot++;
-				continue;
-			}
-			iter->next_index = __radix_tree_iter_add(iter, 1);
-			return NULL;
-		}
 		if (likely(iter->tags & 1ul)) {
 			iter->index = __radix_tree_iter_add(iter, 1);
-			return slot + 1;
+			slot++;
+			goto found;
 		}
 		if (!(flags & RADIX_TREE_ITER_CONTIG)) {
 			unsigned offset = __ffs(iter->tags);
 
-			iter->tags >>= offset;
-			iter->index = __radix_tree_iter_add(iter, offset + 1);
-			return slot + offset + 1;
+			iter->tags >>= offset++;
+			iter->index = __radix_tree_iter_add(iter, offset);
+			slot += offset;
+			goto found;
 		}
 	} else {
 		long count = radix_tree_chunk_size(iter);
-		void *canon = slot;
 
 		while (--count > 0) {
 			slot++;
 			iter->index = __radix_tree_iter_add(iter, 1);
 
-			if (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) &&
-			    radix_tree_is_internal_node(*slot)) {
-				if (entry_to_node(*slot) == canon)
-					continue;
-				iter->next_index = iter->index;
-				break;
-			}
-
 			if (likely(*slot))
-				return slot;
+				goto found;
 			if (flags & RADIX_TREE_ITER_CONTIG) {
 				/* forbid switching to the next chunk */
 				iter->next_index = 0;
@@ -511,6 +495,11 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
 		}
 	}
 	return NULL;
+
+ found:
+	if (unlikely(radix_tree_is_internal_node(*slot)))
+		return __radix_tree_next_slot(slot, iter, flags);
+	return slot;
 }
 
 /**
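Taken together, the deleted IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) blocks and the new 'found:' label move all multiorder handling out of the fast path: whenever a non-empty slot is found, a single check for an internal (sibling) entry decides whether to punt to the out-of-line __radix_tree_next_slot() added to lib/radix-tree.c below. A sibling entry is the marker a multiorder entry leaves in the slots after its canonical index, and the iterator's job is to report such an entry exactly once. A sketch of that guarantee in the userspace harness, assuming its item_insert_order() helper and 'struct item' from tools/testing/radix-tree/test.h:

```c
#include <linux/radix-tree.h>
#include <assert.h>
#include "test.h"	/* tools/testing/radix-tree harness helpers */

static RADIX_TREE(tree, GFP_KERNEL);

static void sibling_entry_demo(void)
{
	struct radix_tree_iter iter;
	void **slot;
	int seen = 0;

	/* one order-2 entry: canonical slot at index 4, siblings at 5-7 */
	assert(item_insert_order(&tree, 4, 2) == 0);

	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		struct item *item = *slot;

		/* never a sibling pointer, and reported exactly once */
		assert(!radix_tree_is_internal_node(item));
		assert(item->index == 4 && item->order == 2);
		seen++;
	}
	assert(seen == 1);
}
```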
@@ -561,6 +550,6 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
 	     slot || (slot = radix_tree_next_chunk(root, iter,	\
 			      RADIX_TREE_ITER_TAGGED | tag)) ;	\
 	     slot = radix_tree_next_slot(slot, iter,		\
-				RADIX_TREE_ITER_TAGGED))
+				RADIX_TREE_ITER_TAGGED | tag))
 
 #endif /* _LINUX_RADIX_TREE_H */
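Note the small but real bug fix at the end of the header changes: radix_tree_for_each_tagged() used to drop the tag number and pass bare RADIX_TREE_ITER_TAGGED to radix_tree_next_slot(). That was harmless while the tag was never consulted again past radix_tree_next_chunk(), but the new __radix_tree_next_slot() recovers it with 'flags & RADIX_TREE_ITER_TAG_MASK' and uses it to pick the node's tag bitmap, so the macro must now forward 'tag' in the flags word or tagged multiorder walks would search tag 0's bits.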
diff --git a/lib/radix-tree.c b/lib/radix-tree.c
index dbf4e8c8e100..0a7ac0fb4a65 100644
--- a/lib/radix-tree.c
+++ b/lib/radix-tree.c
@@ -69,6 +69,11 @@ struct radix_tree_preload {
 };
 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
 
+static inline struct radix_tree_node *entry_to_node(void *ptr)
+{
+	return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
+}
+
 static inline void *node_to_entry(void *ptr)
 {
 	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
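entry_to_node() moves here from the header (its last header-side user was deleted above) and sits next to its inverse, node_to_entry(). The pair is ordinary low-bit pointer tagging: radix tree nodes are word-aligned, so bit 0 of a slot value is free to mark "this points at another node, not at user data". A standalone, compilable illustration; the value 1UL for RADIX_TREE_INTERNAL_NODE and the simplified is_internal() test are assumptions here (the kernel also reserves a second low bit for exceptional entries):

```c
#include <assert.h>
#include <stdio.h>

#define RADIX_TREE_INTERNAL_NODE 1UL	/* assumed: the low tag bit */

struct radix_tree_node { int shift; };	/* stand-in for the real struct */

static inline void *node_to_entry(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
}

static inline struct radix_tree_node *entry_to_node(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
}

static inline int is_internal(void *entry)
{
	return ((unsigned long)entry & RADIX_TREE_INTERNAL_NODE) != 0;
}

int main(void)
{
	struct radix_tree_node n = { .shift = 6 };
	void *entry = node_to_entry(&n);	/* alignment keeps bit 0 free */

	assert(is_internal(entry));
	assert(entry_to_node(entry) == &n);
	printf("shift = %d\n", entry_to_node(entry)->shift);
	return 0;
}
```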
@@ -1104,6 +1109,120 @@ static inline void __set_iter_shift(struct radix_tree_iter *iter,
 #endif
 }
 
+/* Construct iter->tags bit-mask from node->tags[tag] array */
+static void set_iter_tags(struct radix_tree_iter *iter,
+				struct radix_tree_node *node, unsigned offset,
+				unsigned tag)
+{
+	unsigned tag_long = offset / BITS_PER_LONG;
+	unsigned tag_bit = offset % BITS_PER_LONG;
+
+	iter->tags = node->tags[tag][tag_long] >> tag_bit;
+
+	/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
+	if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
+		/* Pick tags from next element */
+		if (tag_bit)
+			iter->tags |= node->tags[tag][tag_long + 1] <<
+						(BITS_PER_LONG - tag_bit);
+		/* Clip chunk size, here only BITS_PER_LONG tags */
+		iter->next_index = __radix_tree_iter_add(iter, BITS_PER_LONG);
+	}
+}
+
+#ifdef CONFIG_RADIX_TREE_MULTIORDER
+static void **skip_siblings(struct radix_tree_node **nodep,
+			void **slot, struct radix_tree_iter *iter)
+{
+	void *sib = node_to_entry(slot - 1);
+
+	while (iter->index < iter->next_index) {
+		*nodep = rcu_dereference_raw(*slot);
+		if (*nodep && *nodep != sib)
+			return slot;
+		slot++;
+		iter->index = __radix_tree_iter_add(iter, 1);
+		iter->tags >>= 1;
+	}
+
+	*nodep = NULL;
+	return NULL;
+}
+
+void ** __radix_tree_next_slot(void **slot, struct radix_tree_iter *iter,
+					unsigned flags)
+{
+	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
+	struct radix_tree_node *node = rcu_dereference_raw(*slot);
+
+	slot = skip_siblings(&node, slot, iter);
+
+	while (radix_tree_is_internal_node(node)) {
+		unsigned offset;
+		unsigned long next_index;
+
+		if (node == RADIX_TREE_RETRY)
+			return slot;
+		node = entry_to_node(node);
+		iter->shift = node->shift;
+
+		if (flags & RADIX_TREE_ITER_TAGGED) {
+			offset = radix_tree_find_next_bit(node, tag, 0);
+			if (offset == RADIX_TREE_MAP_SIZE)
+				return NULL;
+			slot = &node->slots[offset];
+			iter->index = __radix_tree_iter_add(iter, offset);
+			set_iter_tags(iter, node, offset, tag);
+			node = rcu_dereference_raw(*slot);
+		} else {
+			offset = 0;
+			slot = &node->slots[0];
+			for (;;) {
+				node = rcu_dereference_raw(*slot);
+				if (node)
+					break;
+				slot++;
+				offset++;
+				if (offset == RADIX_TREE_MAP_SIZE)
+					return NULL;
+			}
+			iter->index = __radix_tree_iter_add(iter, offset);
+		}
+		if ((flags & RADIX_TREE_ITER_CONTIG) && (offset > 0))
+			goto none;
+		next_index = (iter->index | shift_maxindex(iter->shift)) + 1;
+		if (next_index < iter->next_index)
+			iter->next_index = next_index;
+	}
+
+	return slot;
+ none:
+	iter->next_index = 0;
+	return NULL;
+}
+EXPORT_SYMBOL(__radix_tree_next_slot);
+#else
+static void **skip_siblings(struct radix_tree_node **nodep,
+			void **slot, struct radix_tree_iter *iter)
+{
+	return slot;
+}
+#endif
+
+void **radix_tree_iter_resume(void **slot, struct radix_tree_iter *iter)
+{
+	struct radix_tree_node *node;
+
+	slot++;
+	iter->index = __radix_tree_iter_add(iter, 1);
+	node = rcu_dereference_raw(*slot);
+	skip_siblings(&node, slot, iter);
+	iter->next_index = iter->index;
+	iter->tags = 0;
+	return NULL;
+}
+EXPORT_SYMBOL(radix_tree_iter_resume);
+
 /**
  * radix_tree_next_chunk - find next chunk of slots for iteration
  *
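Three things land in this hunk. set_iter_tags() is the tag-mask construction lifted, nearly verbatim, out of radix_tree_next_chunk() (next hunk) so the new slow path can share it. skip_siblings() steps past sibling entries, which it can recognise because each sibling slot holds the canonical slot's address tagged as an internal pointer, exactly the tail of the entry just delivered. And radix_tree_iter_resume() replaces the old header inline: it still zeroes iter->tags and collapses the chunk by setting next_index to index, but it first dereferences the following slot to hop over siblings, which is why it now needs @slot and must run under the lock. The two-word stitching inside set_iter_tags() is easiest to see in isolation; a standalone sketch, where TAG_LONGS = 2 stands in for a configuration with RADIX_TREE_TAG_LONGS > 1 (on 64-bit kernels it is 1 and the branch vanishes):

```c
#include <assert.h>

#define BITS_PER_LONG	(8 * (int)sizeof(unsigned long))
#define TAG_LONGS	2	/* stand-in for RADIX_TREE_TAG_LONGS */

/* Extract up to BITS_PER_LONG tag bits starting at 'offset', stitching
 * two adjacent bitmap words together when offset is not word-aligned. */
static unsigned long tag_window(const unsigned long tags[TAG_LONGS],
				unsigned int offset)
{
	unsigned int tag_long = offset / BITS_PER_LONG;
	unsigned int tag_bit = offset % BITS_PER_LONG;
	unsigned long w = tags[tag_long] >> tag_bit;

	if (tag_long < TAG_LONGS - 1 && tag_bit)	/* pick up spill-over */
		w |= tags[tag_long + 1] << (BITS_PER_LONG - tag_bit);
	return w;
}

int main(void)
{
	unsigned long tags[TAG_LONGS] = { 1UL << (BITS_PER_LONG - 1), 1UL };

	/* a window starting at word 0's top bit sees that bit at position 0
	 * and word 1's bit 0 at position 1: binary ...011 */
	assert(tag_window(tags, BITS_PER_LONG - 1) == 3UL);
	return 0;
}
```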
@@ -1191,23 +1310,8 @@ void **radix_tree_next_chunk(struct radix_tree_root *root,
 	iter->next_index = (index | node_maxindex(node)) + 1;
 	__set_iter_shift(iter, node->shift);
 
-	/* Construct iter->tags bit-mask from node->tags[tag] array */
-	if (flags & RADIX_TREE_ITER_TAGGED) {
-		unsigned tag_long, tag_bit;
-
-		tag_long = offset / BITS_PER_LONG;
-		tag_bit = offset % BITS_PER_LONG;
-		iter->tags = node->tags[tag][tag_long] >> tag_bit;
-		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
-		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
-			/* Pick tags from next element */
-			if (tag_bit)
-				iter->tags |= node->tags[tag][tag_long + 1] <<
-						(BITS_PER_LONG - tag_bit);
-			/* Clip chunk size, here only BITS_PER_LONG tags */
-			iter->next_index = index + BITS_PER_LONG;
-		}
-	}
+	if (flags & RADIX_TREE_ITER_TAGGED)
+		set_iter_tags(iter, node, offset, tag);
 
 	return node->slots + offset;
 }
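The extraction is not quite mechanical: where this code set iter->next_index = index + BITS_PER_LONG, set_iter_tags() uses __radix_tree_iter_add(iter, BITS_PER_LONG), which scales the step by the iterator's current shift. That keeps the clipped chunk end correct when the tags being summarised belong to a multiorder node, where one slot covers more than one index.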
diff --git a/mm/khugepaged.c b/mm/khugepaged.c
index 7434a63cac94..e32389a97030 100644
--- a/mm/khugepaged.c
+++ b/mm/khugepaged.c
@@ -1446,7 +1446,7 @@ static void collapse_shmem(struct mm_struct *mm,
 		radix_tree_replace_slot(&mapping->page_tree, slot,
 				new_page + (index % HPAGE_PMD_NR));
 
-		slot = radix_tree_iter_next(&iter);
+		slot = radix_tree_iter_resume(slot, &iter);
 		index++;
 		continue;
 out_lru:
@@ -1546,7 +1546,6 @@ tree_unlocked:
 				/* Put holes back where they were */
 				radix_tree_delete(&mapping->page_tree,
 						  iter.index);
-				slot = radix_tree_iter_next(&iter);
 				continue;
 			}
 
@@ -1557,11 +1556,11 @@ tree_unlocked:
 			page_ref_unfreeze(page, 2);
 			radix_tree_replace_slot(&mapping->page_tree,
 						slot, page);
+			slot = radix_tree_iter_resume(slot, &iter);
 			spin_unlock_irq(&mapping->tree_lock);
 			putback_lru_page(page);
 			unlock_page(page);
 			spin_lock_irq(&mapping->tree_lock);
-			slot = radix_tree_iter_next(&iter);
 		}
 		VM_BUG_ON(nr_none);
 		spin_unlock_irq(&mapping->tree_lock);
@@ -1641,8 +1640,8 @@ static void khugepaged_scan_shmem(struct mm_struct *mm,
 		present++;
 
 		if (need_resched()) {
+			slot = radix_tree_iter_resume(slot, &iter);
 			cond_resched_rcu();
-			slot = radix_tree_iter_next(&iter);
 		}
 	}
 	rcu_read_unlock();
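All of the mm call sites converted here and in mm/shmem.c below follow one corrected pattern: resume first, then let cond_resched_rcu() cycle the RCU read lock. The order matters now because radix_tree_iter_resume(), unlike the old helper, reads the tree to skip sibling entries and may only do so while rcu_read_lock() is held. Condensed to its skeleton (the mapping and loop body are placeholders, not runnable on their own):

```c
rcu_read_lock();
radix_tree_for_each_slot(slot, &mapping->page_tree, &iter, 0) {
	/* ... examine *slot ... */

	if (need_resched()) {
		/* invalidate the cached chunk while it can still be
		 * inspected safely, before the RCU read lock is dropped */
		slot = radix_tree_iter_resume(slot, &iter);
		cond_resched_rcu();
	}
}
rcu_read_unlock();
```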
diff --git a/mm/shmem.c b/mm/shmem.c
index abd7403aba41..be11c6dd414a 100644
--- a/mm/shmem.c
+++ b/mm/shmem.c
@@ -661,8 +661,8 @@ unsigned long shmem_partial_swap_usage(struct address_space *mapping,
 			swapped++;
 
 		if (need_resched()) {
+			slot = radix_tree_iter_resume(slot, &iter);
 			cond_resched_rcu();
-			slot = radix_tree_iter_next(&iter);
 		}
 	}
 
@@ -2447,8 +2447,8 @@ static void shmem_tag_pins(struct address_space *mapping)
 		}
 
 		if (need_resched()) {
+			slot = radix_tree_iter_resume(slot, &iter);
 			cond_resched_rcu();
-			slot = radix_tree_iter_next(&iter);
 		}
 	}
 	rcu_read_unlock();
@@ -2517,8 +2517,8 @@ static int shmem_wait_for_pins(struct address_space *mapping)
 			spin_unlock_irq(&mapping->tree_lock);
continue_resched:
 		if (need_resched()) {
+			slot = radix_tree_iter_resume(slot, &iter);
 			cond_resched_rcu();
-			slot = radix_tree_iter_next(&iter);
 		}
 	}
 	rcu_read_unlock();
diff --git a/tools/testing/radix-tree/iteration_check.c b/tools/testing/radix-tree/iteration_check.c
index df71cb841385..f328a66899b4 100644
--- a/tools/testing/radix-tree/iteration_check.c
+++ b/tools/testing/radix-tree/iteration_check.c
@@ -48,8 +48,8 @@ static void *add_entries_fn(void *arg)
 /*
  * Iterate over the tagged entries, doing a radix_tree_iter_retry() as we find
  * things that have been removed and randomly resetting our iteration to the
- * next chunk with radix_tree_iter_next(). Both radix_tree_iter_retry() and
- * radix_tree_iter_next() cause radix_tree_next_slot() to be called with a
+ * next chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
+ * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
  * NULL 'slot' variable.
  */
 static void *tagged_iteration_fn(void *arg)
@@ -79,7 +79,7 @@ static void *tagged_iteration_fn(void *arg)
 		}
 
 		if (rand_r(&seeds[0]) % 50 == 0) {
-			slot = radix_tree_iter_next(&iter);
+			slot = radix_tree_iter_resume(slot, &iter);
 			rcu_read_unlock();
 			rcu_barrier();
 			rcu_read_lock();
@@ -96,8 +96,8 @@ static void *tagged_iteration_fn(void *arg)
 /*
  * Iterate over the entries, doing a radix_tree_iter_retry() as we find things
  * that have been removed and randomly resetting our iteration to the next
- * chunk with radix_tree_iter_next(). Both radix_tree_iter_retry() and
- * radix_tree_iter_next() cause radix_tree_next_slot() to be called with a
+ * chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
+ * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
  * NULL 'slot' variable.
  */
 static void *untagged_iteration_fn(void *arg)
@@ -127,7 +127,7 @@ static void *untagged_iteration_fn(void *arg)
 		}
 
 		if (rand_r(&seeds[1]) % 50 == 0) {
-			slot = radix_tree_iter_next(&iter);
+			slot = radix_tree_iter_resume(slot, &iter);
 			rcu_read_unlock();
 			rcu_barrier();
 			rcu_read_lock();
diff --git a/tools/testing/radix-tree/multiorder.c b/tools/testing/radix-tree/multiorder.c
index 8d5865c95664..b9be8856d652 100644
--- a/tools/testing/radix-tree/multiorder.c
+++ b/tools/testing/radix-tree/multiorder.c
@@ -231,11 +231,14 @@ void multiorder_iteration(void)
 		radix_tree_for_each_slot(slot, &tree, &iter, j) {
 			int height = order[i] / RADIX_TREE_MAP_SHIFT;
 			int shift = height * RADIX_TREE_MAP_SHIFT;
-			int mask = (1 << order[i]) - 1;
+			unsigned long mask = (1UL << order[i]) - 1;
+			struct item *item = *slot;
 
-			assert(iter.index >= (index[i] &~ mask));
-			assert(iter.index <= (index[i] | mask));
+			assert((iter.index | mask) == (index[i] | mask));
 			assert(iter.shift == shift);
+			assert(!radix_tree_is_internal_node(item));
+			assert((item->index | mask) == (index[i] | mask));
+			assert(item->order == order[i]);
 			i++;
 		}
 	}
@@ -269,7 +272,7 @@ void multiorder_tagged_iteration(void)
 		assert(radix_tree_tag_set(&tree, tag_index[i], 1));
 
 	for (j = 0; j < 256; j++) {
-		int mask, k;
+		int k;
 
 		for (i = 0; i < TAG_ENTRIES; i++) {
 			for (k = i; index[k] < tag_index[i]; k++)
@@ -279,12 +282,16 @@ void multiorder_tagged_iteration(void)
 		}
 
 		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
+			unsigned long mask;
+			struct item *item = *slot;
 			for (k = i; index[k] < tag_index[i]; k++)
 				;
-			mask = (1 << order[k]) - 1;
+			mask = (1UL << order[k]) - 1;
 
-			assert(iter.index >= (tag_index[i] &~ mask));
-			assert(iter.index <= (tag_index[i] | mask));
+			assert((iter.index | mask) == (tag_index[i] | mask));
+			assert(!radix_tree_is_internal_node(item));
+			assert((item->index | mask) == (tag_index[i] | mask));
+			assert(item->order == order[k]);
 			i++;
 		}
 	}
@@ -303,12 +310,15 @@ void multiorder_tagged_iteration(void)
 	}
 
 	radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
+		struct item *item = *slot;
 		for (k = i; index[k] < tag_index[i]; k++)
 			;
 		mask = (1 << order[k]) - 1;
 
-		assert(iter.index >= (tag_index[i] &~ mask));
-		assert(iter.index <= (tag_index[i] | mask));
+		assert((iter.index | mask) == (tag_index[i] | mask));
+		assert(!radix_tree_is_internal_node(item));
+		assert((item->index | mask) == (tag_index[i] | mask));
+		assert(item->order == order[k]);
 		i++;
 	}
 }
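The rewritten assertions lean on a small identity: with mask = (1UL << order) - 1, the single test (a | mask) == (b | mask) holds exactly when a and b fall in the same naturally aligned 2^order block, which is what the old >= / <= pair checked; the 1UL also keeps the shift well-defined for large orders. The added lines then pin down the new iterator guarantees, namely that callers get the canonical item, never an internal sibling entry, with the order it was inserted at. A standalone check of the identity:

```c
#include <assert.h>

int main(void)
{
	unsigned int order = 2;			/* blocks of 4 indices */
	unsigned long mask = (1UL << order) - 1;
	unsigned long index = 5, i;

	for (i = 0; i < 16; i++) {
		int new_check = (i | mask) == (index | mask);
		int old_check = i >= (index & ~mask) && i <= (index | mask);

		assert(new_check == old_check);	/* true only for 4..7 */
	}
	return 0;
}
```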
diff --git a/tools/testing/radix-tree/regression3.c b/tools/testing/radix-tree/regression3.c
index 1f06ed73d0a8..b594841fae85 100644
--- a/tools/testing/radix-tree/regression3.c
+++ b/tools/testing/radix-tree/regression3.c
@@ -5,7 +5,7 @@
  * In following radix_tree_next_slot current chunk size becomes zero.
  * This isn't checked and it tries to dereference null pointer in slot.
  *
- * Helper radix_tree_iter_next reset slot to NULL and next_index to index + 1,
+ * Helper radix_tree_iter_resume reset slot to NULL and next_index to index + 1,
  * for tagger iteraction it also must reset cached tags in iterator to abort
  * next radix_tree_next_slot and go to slow-path into radix_tree_next_chunk.
  *
@@ -88,7 +88,7 @@ void regression3_test(void)
 		printf("slot %ld %p\n", iter.index, *slot);
 		if (!iter.index) {
 			printf("next at %ld\n", iter.index);
-			slot = radix_tree_iter_next(&iter);
+			slot = radix_tree_iter_resume(slot, &iter);
 		}
 	}
 
@@ -96,7 +96,7 @@ void regression3_test(void)
 		printf("contig %ld %p\n", iter.index, *slot);
 		if (!iter.index) {
 			printf("next at %ld\n", iter.index);
-			slot = radix_tree_iter_next(&iter);
+			slot = radix_tree_iter_resume(slot, &iter);
 		}
 	}
 
@@ -106,7 +106,7 @@ void regression3_test(void)
 		printf("tagged %ld %p\n", iter.index, *slot);
 		if (!iter.index) {
 			printf("next at %ld\n", iter.index);
-			slot = radix_tree_iter_next(&iter);
+			slot = radix_tree_iter_resume(slot, &iter);
 		}
 	}
 
diff --git a/tools/testing/radix-tree/test.h b/tools/testing/radix-tree/test.h
index 423c528aaee9..617416ec3c5e 100644
--- a/tools/testing/radix-tree/test.h
+++ b/tools/testing/radix-tree/test.h
@@ -41,6 +41,7 @@ void verify_tag_consistency(struct radix_tree_root *root, unsigned int tag);
 extern int nr_allocated;
 
 /* Normally private parts of lib/radix-tree.c */
+struct radix_tree_node *entry_to_node(void *ptr);
 void radix_tree_dump(struct radix_tree_root *root);
 int root_tag_get(struct radix_tree_root *root, unsigned int tag);
 unsigned long node_maxindex(struct radix_tree_node *);
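A closing footnote on this last hunk: with entry_to_node() no longer defined in the public header, the userspace test harness declares it here alongside the other "normally private" lib/radix-tree.c symbols it links against, presumably so tests that poke at internal nodes keep building.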