author     Matthew Wilcox <willy@linux.intel.com>           2016-05-20 20:03:30 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>   2016-05-20 20:58:30 -0400
commit     b194d16c27af905d6e3552f4851bc7d9fee4e90f
tree       fe4619d43e3f97f8d687aa6f806d44a062991d55 /lib
parent     4dd6c0987ca43d6544f4f0a3f86f6ea3bfc60fc1
radix-tree: rename radix_tree_is_indirect_ptr()
As with indirect_to_ptr(), ptr_to_indirect() and
RADIX_TREE_INDIRECT_PTR, change radix_tree_is_indirect_ptr() to
radix_tree_is_internal_node().
Signed-off-by: Matthew Wilcox <willy@linux.intel.com>
Cc: Konstantin Khlebnikov <koct9i@gmail.com>
Cc: Kirill Shutemov <kirill.shutemov@linux.intel.com>
Cc: Jan Kara <jack@suse.com>
Cc: Neil Brown <neilb@suse.de>
Cc: Ross Zwisler <ross.zwisler@linux.intel.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
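Background for the rename: a slot in the radix tree may hold either a user data pointer or a pointer to a child radix_tree_node, and the predicate being renamed here tells the two apart by a tag in the pointer's low bits. The sketch below only illustrates that tagging convention under assumed names and an assumed tag value (INTERNAL_NODE_TAG, is_internal_node(), tag_node(), untag_entry()); it is not the kernel's verbatim definition of radix_tree_is_internal_node().

#include <stdbool.h>

struct radix_tree_node;	/* opaque here; the real layout lives in the kernel */

#define INTERNAL_NODE_TAG	1UL	/* assumed: a low pointer bit marks an internal node */

/* Roughly what radix_tree_is_internal_node() checks: is the tag bit set? */
static inline bool is_internal_node(const void *entry)
{
	return (unsigned long)entry & INTERNAL_NODE_TAG;
}

/*
 * Illustrative counterparts of node_to_entry()/entry_to_node() seen in the
 * diff below: set the tag when storing a child pointer, strip it when
 * following one.
 */
static inline void *tag_node(struct radix_tree_node *node)
{
	return (void *)((unsigned long)node | INTERNAL_NODE_TAG);
}

static inline struct radix_tree_node *untag_entry(void *entry)
{
	return (struct radix_tree_node *)((unsigned long)entry & ~INTERNAL_NODE_TAG);
}

With that picture in mind, the patch below is a mechanical rename: every radix_tree_is_indirect_ptr() call becomes radix_tree_is_internal_node(), with no change in behaviour.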
Diffstat (limited to 'lib')
-rw-r--r--  lib/radix-tree.c | 48
1 file changed, 24 insertions(+), 24 deletions(-)
diff --git a/lib/radix-tree.c b/lib/radix-tree.c
index 3c3fdd9c5bb3..b65c83036ca4 100644
--- a/lib/radix-tree.c
+++ b/lib/radix-tree.c
@@ -100,7 +100,7 @@ static unsigned radix_tree_descend(struct radix_tree_node *parent,
 	void **entry = rcu_dereference_raw(parent->slots[offset]);
 
 #ifdef CONFIG_RADIX_TREE_MULTIORDER
-	if (radix_tree_is_indirect_ptr(entry)) {
+	if (radix_tree_is_internal_node(entry)) {
 		unsigned long siboff = get_slot_offset(parent, entry);
 		if (siboff < RADIX_TREE_MAP_SIZE) {
 			offset = siboff;
@@ -232,7 +232,7 @@ static void dump_node(struct radix_tree_node *node, unsigned long index)
 				entry, i,
 				*(void **)entry_to_node(entry),
 				first, last);
-		} else if (!radix_tree_is_indirect_ptr(entry)) {
+		} else if (!radix_tree_is_internal_node(entry)) {
 			pr_debug("radix entry %p offset %ld indices %ld-%ld\n",
 				entry, i, first, last);
 		} else {
@@ -247,7 +247,7 @@ static void radix_tree_dump(struct radix_tree_root *root)
 	pr_debug("radix root: %p rnode %p tags %x\n",
 			root, root->rnode,
 			root->gfp_mask >> __GFP_BITS_SHIFT);
-	if (!radix_tree_is_indirect_ptr(root->rnode))
+	if (!radix_tree_is_internal_node(root->rnode))
 		return;
 	dump_node(entry_to_node(root->rnode), 0);
 }
@@ -302,7 +302,7 @@ radix_tree_node_alloc(struct radix_tree_root *root)
 	ret = kmem_cache_alloc(radix_tree_node_cachep,
 			gfp_mask | __GFP_ACCOUNT);
 out:
-	BUG_ON(radix_tree_is_indirect_ptr(ret));
+	BUG_ON(radix_tree_is_internal_node(ret));
 	return ret;
 }
 
@@ -421,7 +421,7 @@ static unsigned radix_tree_load_root(struct radix_tree_root *root,
 
 	*nodep = node;
 
-	if (likely(radix_tree_is_indirect_ptr(node))) {
+	if (likely(radix_tree_is_internal_node(node))) {
 		node = entry_to_node(node);
 		*maxindex = node_maxindex(node);
 		return node->shift + RADIX_TREE_MAP_SHIFT;
@@ -467,7 +467,7 @@ static int radix_tree_extend(struct radix_tree_root *root,
 		node->offset = 0;
 		node->count = 1;
 		node->parent = NULL;
-		if (radix_tree_is_indirect_ptr(slot))
+		if (radix_tree_is_internal_node(slot))
 			entry_to_node(slot)->parent = node;
 		node->slots[0] = slot;
 		slot = node_to_entry(node);
@@ -535,7 +535,7 @@ int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
 			} else
 				rcu_assign_pointer(root->rnode,
 					node_to_entry(slot));
-		} else if (!radix_tree_is_indirect_ptr(slot))
+		} else if (!radix_tree_is_internal_node(slot))
 			break;
 
 		/* Go a level down */
@@ -585,7 +585,7 @@ int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
 	void **slot;
 	int error;
 
-	BUG_ON(radix_tree_is_indirect_ptr(item));
+	BUG_ON(radix_tree_is_internal_node(item));
 
 	error = __radix_tree_create(root, index, order, &node, &slot);
 	if (error)
@@ -637,7 +637,7 @@ void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
 	if (index > maxindex)
 		return NULL;
 
-	while (radix_tree_is_indirect_ptr(node)) {
+	while (radix_tree_is_internal_node(node)) {
 		unsigned offset;
 
 		if (node == RADIX_TREE_RETRY)
@@ -720,7 +720,7 @@ void *radix_tree_tag_set(struct radix_tree_root *root,
 	shift = radix_tree_load_root(root, &node, &maxindex);
 	BUG_ON(index > maxindex);
 
-	while (radix_tree_is_indirect_ptr(node)) {
+	while (radix_tree_is_internal_node(node)) {
 		unsigned offset;
 
 		shift -= RADIX_TREE_MAP_SHIFT;
@@ -770,7 +770,7 @@ void *radix_tree_tag_clear(struct radix_tree_root *root,
 
 	parent = NULL;
 
-	while (radix_tree_is_indirect_ptr(node)) {
+	while (radix_tree_is_internal_node(node)) {
 		shift -= RADIX_TREE_MAP_SHIFT;
 		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
 
@@ -835,7 +835,7 @@ int radix_tree_tag_get(struct radix_tree_root *root,
 	if (node == NULL)
 		return 0;
 
-	while (radix_tree_is_indirect_ptr(node)) {
+	while (radix_tree_is_internal_node(node)) {
 		int offset;
 
 		shift -= RADIX_TREE_MAP_SHIFT;
@@ -900,7 +900,7 @@ void **radix_tree_next_chunk(struct radix_tree_root *root,
 	if (index > maxindex)
 		return NULL;
 
-	if (radix_tree_is_indirect_ptr(rnode)) {
+	if (radix_tree_is_internal_node(rnode)) {
 		rnode = entry_to_node(rnode);
 	} else if (rnode) {
 		/* Single-slot tree */
@@ -957,7 +957,7 @@ void **radix_tree_next_chunk(struct radix_tree_root *root,
 
 		if ((slot == NULL) || (slot == RADIX_TREE_RETRY))
 			goto restart;
-		if (!radix_tree_is_indirect_ptr(slot))
+		if (!radix_tree_is_internal_node(slot))
 			break;
 
 		node = entry_to_node(slot);
@@ -1039,7 +1039,7 @@ unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
 		*first_indexp = last_index + 1;
 		return 0;
 	}
-	if (!radix_tree_is_indirect_ptr(slot)) {
+	if (!radix_tree_is_internal_node(slot)) {
 		*first_indexp = last_index + 1;
 		root_tag_set(root, settag);
 		return 1;
@@ -1059,7 +1059,7 @@ unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
 		if (!tag_get(node, iftag, offset))
 			goto next;
 		/* Sibling slots never have tags set on them */
-		if (radix_tree_is_indirect_ptr(slot)) {
+		if (radix_tree_is_internal_node(slot)) {
 			node = entry_to_node(slot);
 			shift -= RADIX_TREE_MAP_SHIFT;
 			continue;
@@ -1152,7 +1152,7 @@ radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
 		results[ret] = rcu_dereference_raw(*slot);
 		if (!results[ret])
 			continue;
-		if (radix_tree_is_indirect_ptr(results[ret])) {
+		if (radix_tree_is_internal_node(results[ret])) {
 			slot = radix_tree_iter_retry(&iter);
 			continue;
 		}
@@ -1235,7 +1235,7 @@ radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
 		results[ret] = rcu_dereference_raw(*slot);
 		if (!results[ret])
 			continue;
-		if (radix_tree_is_indirect_ptr(results[ret])) {
+		if (radix_tree_is_internal_node(results[ret])) {
 			slot = radix_tree_iter_retry(&iter);
 			continue;
 		}
@@ -1311,7 +1311,7 @@ static unsigned long __locate(struct radix_tree_node *slot, void *item,
 				rcu_dereference_raw(slot->slots[i]);
 		if (node == RADIX_TREE_RETRY)
 			goto out;
-		if (!radix_tree_is_indirect_ptr(node)) {
+		if (!radix_tree_is_internal_node(node)) {
 			if (node == item) {
 				info->found_index = index;
 				info->stop = true;
@@ -1357,7 +1357,7 @@ unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
 	do {
 		rcu_read_lock();
 		node = rcu_dereference_raw(root->rnode);
-		if (!radix_tree_is_indirect_ptr(node)) {
+		if (!radix_tree_is_internal_node(node)) {
 			rcu_read_unlock();
 			if (node == item)
 				info.found_index = 0;
@@ -1398,7 +1398,7 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root)
 		struct radix_tree_node *to_free = root->rnode;
 		struct radix_tree_node *slot;
 
-		if (!radix_tree_is_indirect_ptr(to_free))
+		if (!radix_tree_is_internal_node(to_free))
 			break;
 		to_free = entry_to_node(to_free);
 
@@ -1412,10 +1412,10 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root)
 		slot = to_free->slots[0];
 		if (!slot)
 			break;
-		if (!radix_tree_is_indirect_ptr(slot) && to_free->shift)
+		if (!radix_tree_is_internal_node(slot) && to_free->shift)
 			break;
 
-		if (radix_tree_is_indirect_ptr(slot))
+		if (radix_tree_is_internal_node(slot))
 			entry_to_node(slot)->parent = NULL;
 
 		/*
@@ -1445,7 +1445,7 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root)
 		 * also results in a stale slot). So tag the slot as indirect
 		 * to force callers to retry.
 		 */
-		if (!radix_tree_is_indirect_ptr(slot))
+		if (!radix_tree_is_internal_node(slot))
 			to_free->slots[0] = RADIX_TREE_RETRY;
 
 		radix_tree_node_free(to_free);