aboutsummaryrefslogtreecommitdiffstats
path: root/tools/testing/radix-tree/multiorder.c
diff options
context:
space:
mode:
Diffstat (limited to 'tools/testing/radix-tree/multiorder.c')
-rw-r--r--  tools/testing/radix-tree/multiorder.c  326
1 file changed, 308 insertions(+), 18 deletions(-)
diff --git a/tools/testing/radix-tree/multiorder.c b/tools/testing/radix-tree/multiorder.c
index d1be94667a30..f79812a5e070 100644
--- a/tools/testing/radix-tree/multiorder.c
+++ b/tools/testing/radix-tree/multiorder.c
@@ -26,7 +26,6 @@ static void __multiorder_tag_test(int index, int order)
26{ 26{
27 RADIX_TREE(tree, GFP_KERNEL); 27 RADIX_TREE(tree, GFP_KERNEL);
28 int base, err, i; 28 int base, err, i;
29 unsigned long first = 0;
30 29
31 /* our canonical entry */ 30 /* our canonical entry */
32 base = index & ~((1 << order) - 1); 31 base = index & ~((1 << order) - 1);
@@ -60,7 +59,7 @@ static void __multiorder_tag_test(int index, int order)
60 assert(!radix_tree_tag_get(&tree, i, 1)); 59 assert(!radix_tree_tag_get(&tree, i, 1));
61 } 60 }
62 61
63 assert(radix_tree_range_tag_if_tagged(&tree, &first, ~0UL, 10, 0, 1) == 1); 62 assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
64 assert(radix_tree_tag_clear(&tree, index, 0)); 63 assert(radix_tree_tag_clear(&tree, index, 0));
65 64
66 for_each_index(i, base, order) { 65 for_each_index(i, base, order) {
@@ -76,8 +75,27 @@ static void __multiorder_tag_test(int index, int order)
76 item_kill_tree(&tree); 75 item_kill_tree(&tree);
77} 76}
78 77
78static void __multiorder_tag_test2(unsigned order, unsigned long index2)
79{
80 RADIX_TREE(tree, GFP_KERNEL);
81 unsigned long index = (1 << order);
82 index2 += index;
83
84 assert(item_insert_order(&tree, 0, order) == 0);
85 assert(item_insert(&tree, index2) == 0);
86
87 assert(radix_tree_tag_set(&tree, 0, 0));
88 assert(radix_tree_tag_set(&tree, index2, 0));
89
90 assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);
91
92 item_kill_tree(&tree);
93}
94
79static void multiorder_tag_tests(void) 95static void multiorder_tag_tests(void)
80{ 96{
97 int i, j;
98
81 /* test multi-order entry for indices 0-7 with no sibling pointers */ 99 /* test multi-order entry for indices 0-7 with no sibling pointers */
82 __multiorder_tag_test(0, 3); 100 __multiorder_tag_test(0, 3);
83 __multiorder_tag_test(5, 3); 101 __multiorder_tag_test(5, 3);
@@ -117,6 +135,10 @@ static void multiorder_tag_tests(void)
117 __multiorder_tag_test(300, 8); 135 __multiorder_tag_test(300, 8);
118 136
119 __multiorder_tag_test(0x12345678UL, 8); 137 __multiorder_tag_test(0x12345678UL, 8);
138
139 for (i = 1; i < 10; i++)
140 for (j = 0; j < (10 << i); j++)
141 __multiorder_tag_test2(i, j);
120} 142}
121 143
122static void multiorder_check(unsigned long index, int order) 144static void multiorder_check(unsigned long index, int order)
@@ -125,7 +147,7 @@ static void multiorder_check(unsigned long index, int order)
125 unsigned long min = index & ~((1UL << order) - 1); 147 unsigned long min = index & ~((1UL << order) - 1);
126 unsigned long max = min + (1UL << order); 148 unsigned long max = min + (1UL << order);
127 void **slot; 149 void **slot;
128 struct item *item2 = item_create(min); 150 struct item *item2 = item_create(min, order);
129 RADIX_TREE(tree, GFP_KERNEL); 151 RADIX_TREE(tree, GFP_KERNEL);
130 152
131 printf("Multiorder index %ld, order %d\n", index, order); 153 printf("Multiorder index %ld, order %d\n", index, order);
@@ -231,11 +253,14 @@ void multiorder_iteration(void)
231 radix_tree_for_each_slot(slot, &tree, &iter, j) { 253 radix_tree_for_each_slot(slot, &tree, &iter, j) {
232 int height = order[i] / RADIX_TREE_MAP_SHIFT; 254 int height = order[i] / RADIX_TREE_MAP_SHIFT;
233 int shift = height * RADIX_TREE_MAP_SHIFT; 255 int shift = height * RADIX_TREE_MAP_SHIFT;
234 int mask = (1 << order[i]) - 1; 256 unsigned long mask = (1UL << order[i]) - 1;
257 struct item *item = *slot;
235 258
236 assert(iter.index >= (index[i] &~ mask)); 259 assert((iter.index | mask) == (index[i] | mask));
237 assert(iter.index <= (index[i] | mask));
238 assert(iter.shift == shift); 260 assert(iter.shift == shift);
261 assert(!radix_tree_is_internal_node(item));
262 assert((item->index | mask) == (index[i] | mask));
263 assert(item->order == order[i]);
239 i++; 264 i++;
240 } 265 }
241 } 266 }
@@ -248,7 +273,6 @@ void multiorder_tagged_iteration(void)
248 RADIX_TREE(tree, GFP_KERNEL); 273 RADIX_TREE(tree, GFP_KERNEL);
249 struct radix_tree_iter iter; 274 struct radix_tree_iter iter;
250 void **slot; 275 void **slot;
251 unsigned long first = 0;
252 int i, j; 276 int i, j;
253 277
254 printf("Multiorder tagged iteration test\n"); 278 printf("Multiorder tagged iteration test\n");
@@ -269,7 +293,7 @@ void multiorder_tagged_iteration(void)
269 assert(radix_tree_tag_set(&tree, tag_index[i], 1)); 293 assert(radix_tree_tag_set(&tree, tag_index[i], 1));
270 294
271 for (j = 0; j < 256; j++) { 295 for (j = 0; j < 256; j++) {
272 int mask, k; 296 int k;
273 297
274 for (i = 0; i < TAG_ENTRIES; i++) { 298 for (i = 0; i < TAG_ENTRIES; i++) {
275 for (k = i; index[k] < tag_index[i]; k++) 299 for (k = i; index[k] < tag_index[i]; k++)
@@ -279,18 +303,22 @@ void multiorder_tagged_iteration(void)
279 } 303 }
280 304
281 radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) { 305 radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
306 unsigned long mask;
307 struct item *item = *slot;
282 for (k = i; index[k] < tag_index[i]; k++) 308 for (k = i; index[k] < tag_index[i]; k++)
283 ; 309 ;
284 mask = (1 << order[k]) - 1; 310 mask = (1UL << order[k]) - 1;
285 311
286 assert(iter.index >= (tag_index[i] &~ mask)); 312 assert((iter.index | mask) == (tag_index[i] | mask));
287 assert(iter.index <= (tag_index[i] | mask)); 313 assert(!radix_tree_is_internal_node(item));
314 assert((item->index | mask) == (tag_index[i] | mask));
315 assert(item->order == order[k]);
288 i++; 316 i++;
289 } 317 }
290 } 318 }
291 319
292 radix_tree_range_tag_if_tagged(&tree, &first, ~0UL, 320 assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
293 MT_NUM_ENTRIES, 1, 2); 321 TAG_ENTRIES);
294 322
295 for (j = 0; j < 256; j++) { 323 for (j = 0; j < 256; j++) {
296 int mask, k; 324 int mask, k;
@@ -303,19 +331,21 @@ void multiorder_tagged_iteration(void)
303 } 331 }
304 332
305 radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) { 333 radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
334 struct item *item = *slot;
306 for (k = i; index[k] < tag_index[i]; k++) 335 for (k = i; index[k] < tag_index[i]; k++)
307 ; 336 ;
308 mask = (1 << order[k]) - 1; 337 mask = (1 << order[k]) - 1;
309 338
310 assert(iter.index >= (tag_index[i] &~ mask)); 339 assert((iter.index | mask) == (tag_index[i] | mask));
311 assert(iter.index <= (tag_index[i] | mask)); 340 assert(!radix_tree_is_internal_node(item));
341 assert((item->index | mask) == (tag_index[i] | mask));
342 assert(item->order == order[k]);
312 i++; 343 i++;
313 } 344 }
314 } 345 }
315 346
316 first = 1; 347 assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
317 radix_tree_range_tag_if_tagged(&tree, &first, ~0UL, 348 == TAG_ENTRIES);
318 MT_NUM_ENTRIES, 1, 0);
319 i = 0; 349 i = 0;
320 radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) { 350 radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
321 assert(iter.index == tag_index[i]); 351 assert(iter.index == tag_index[i]);
@@ -325,6 +355,261 @@ void multiorder_tagged_iteration(void)
325 item_kill_tree(&tree); 355 item_kill_tree(&tree);
326} 356}
327 357
/*
 * Insert an order-@order2 entry covering @index, then join a larger
 * order-@order1 entry on top of it at @index + 1.  Afterwards a lookup
 * at @index + 1 must return the new item.  If the join disconnected the
 * old item without freeing it, free it here so the test doesn't leak.
 */
static void multiorder_join1(unsigned long index,
				unsigned order1, unsigned order2)
{
	unsigned long loc;
	void *item, *item2 = item_create(index + 1, order1);
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert_order(&tree, index, order2);
	item = radix_tree_lookup(&tree, index);
	radix_tree_join(&tree, index + 1, order1, item2);
	/* find_item() presumably yields -1 when absent — TODO confirm */
	loc = find_item(&tree, item);
	if (loc == -1)
		free(item);
	item = radix_tree_lookup(&tree, index + 1);
	assert(item == item2);
	item_kill_tree(&tree);
}
375
/*
 * Join a multi-order item over a region containing an exceptional
 * entry (the 0x12UL value — counted in node->exceptional below) and
 * check the node's exceptional count drops back to zero.
 */
static void multiorder_join2(unsigned order1, unsigned order2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void *item1 = item_create(0, order1);
	void *item2;

	item_insert_order(&tree, 0, order2);
	/* value with low bit set so the tree accounts it as exceptional */
	radix_tree_insert(&tree, 1 << order2, (void *)0x12UL);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == (void *)0x12UL);
	assert(node->exceptional == 1);

	/* joining item1 at order1 > order2 replaces the exceptional entry */
	radix_tree_join(&tree, 0, order1, item1);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == item1);
	assert(node->exceptional == 0);
	item_kill_tree(&tree);
}
395
/*
 * This test revealed an accounting bug for exceptional entries at one point.
 * Nodes were being freed back into the pool with an elevated exception count
 * by radix_tree_join() and then radix_tree_split() was failing to zero the
 * count of exceptional entries.
 */
static void multiorder_join3(unsigned int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;
	struct radix_tree_iter iter;
	unsigned long i;

	/* fill the whole order-sized range with exceptional entries */
	for (i = 0; i < (1 << order); i++) {
		radix_tree_insert(&tree, i, (void *)0x12UL);
	}

	/* collapse them into a single multi-order exceptional entry */
	radix_tree_join(&tree, 0, order, (void *)0x16UL);
	rcu_barrier();

	/* split back to individual slots, then repopulate every one */
	radix_tree_split(&tree, 0, 0);

	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x12UL);
	}

	/* every occupied slot should be counted as exceptional again */
	__radix_tree_lookup(&tree, 0, &node, NULL);
	assert(node->exceptional == node->count);

	item_kill_tree(&tree);
}
428
/* Drive the join tests over a spread of indices and order pairs. */
static void multiorder_join(void)
{
	int big, small, index;

	/* index sequence 0, 3, 9, 21, ... stays below 1024 */
	for (index = 0; index < 1024; index = index * 2 + 3)
		for (big = 1; big < 15; big++)
			for (small = 0; small < big; small++)
				multiorder_join1(index, big, small);

	for (big = 1; big < 15; big++)
		for (small = 0; small < big; small++)
			multiorder_join2(big, small);

	for (big = 3; big < 10; big++)
		multiorder_join3(big);
}
451
/*
 * After a split, report (without failing) any preloaded nodes left
 * unconsumed, and any growth of nr_allocated past the pre-split
 * snapshot @alloc.
 */
static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
{
	struct radix_tree_preload *rtp = &radix_tree_preloads;
	if (rtp->nr != 0)
		printf("split(%u %u) remaining %u\n", old_order, new_order,
				rtp->nr);
	/*
	 * Can't check for equality here as some nodes may have been
	 * RCU-freed while we ran. But we should never finish with more
	 * nodes allocated since they should have all been preloaded.
	 */
	if (nr_allocated > alloc)
		printf("split(%u %u) allocated %u %u\n", old_order, new_order,
				alloc, nr_allocated);
}
467
/*
 * Split one order-@old_order item into order-@new_order pieces and
 * check the split consumed only preallocated nodes (via check_mem()).
 * Each resulting slot is replaced with a freshly created item.
 */
static void __multiorder_split(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_ATOMIC);
	void **slot;
	struct radix_tree_iter iter;
	unsigned alloc;

	radix_tree_preload(GFP_KERNEL);
	assert(item_insert_order(&tree, 0, old_order) == 0);
	radix_tree_preload_end();

	/* Wipe out the preloaded cache or it'll confuse check_mem() */
	radix_tree_cpu_dead(0);

	radix_tree_tag_set(&tree, 0, 2);

	radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
	/* snapshot allocation count so check_mem() can detect growth */
	alloc = nr_allocated;
	radix_tree_split(&tree, 0, new_order);
	check_mem(old_order, new_order, alloc);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}
	/* releases the radix_tree_split_preload() above */
	radix_tree_preload_end();

	item_kill_tree(&tree);
}
496
/*
 * Split a multi-order exceptional entry (0x12) and replace every piece
 * with a normal item; the node's exceptional count must return to zero.
 */
static void __multiorder_split2(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}

	/* all exceptional entries were replaced by normal items */
	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item != (void *)0x12);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}
523
/*
 * Two passes over splitting an exceptional entry:
 * 1) replace every split slot with another exceptional value (0x16)
 *    and check the exceptional count stays positive;
 * 2) keep only the slot at index 1 << new_order, NULL the rest, and
 *    check per-node accounting (count == exceptional in the leaf,
 *    count == 1 and exceptional == 0 in every ancestor).
 */
static void __multiorder_split3(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->exceptional > 0);

	item_kill_tree(&tree);

	/* second pass: split, then keep only a single exceptional slot */
	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		if (iter.index == (1 << new_order))
			radix_tree_iter_replace(&tree, &iter, slot,
						(void *)0x16);
		else
			radix_tree_iter_replace(&tree, &iter, slot, NULL);
	}

	item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->count == node->exceptional);
	/* walk up the tree verifying ancestor accounting */
	do {
		node = node->parent;
		if (!node)
			break;
		assert(node->count == 1);
		assert(node->exceptional == 0);
	} while (1);

	item_kill_tree(&tree);
}
577
/* Run the three split tests for every order pair with old > new. */
static void multiorder_split(void)
{
	int old_order, new_order;

	for (old_order = 3; old_order < 11; old_order++) {
		for (new_order = 0; new_order < old_order; new_order++) {
			__multiorder_split(old_order, new_order);
			__multiorder_split2(old_order, new_order);
			__multiorder_split3(old_order, new_order);
		}
	}
}
589
/*
 * Check exceptional-entry accounting when an exceptional entry beside
 * an order-5 item is removed two different ways: radix_tree_delete()
 * and __radix_tree_replace() with NULL.  Both must zero the count.
 */
static void multiorder_account(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;

	item_insert_order(&tree, 0, 5);

	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 0, &node, NULL);
	/* each order-5 entry occupies the same slot count — TODO confirm */
	assert(node->count == node->exceptional * 2);
	radix_tree_delete(&tree, 1 << 5);
	assert(node->exceptional == 0);

	/* same check, but clearing the slot via __radix_tree_replace() */
	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 1 << 5, &node, &slot);
	assert(node->count == node->exceptional * 2);
	__radix_tree_replace(&tree, node, slot, NULL, NULL, NULL);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}
612
328void multiorder_checks(void) 613void multiorder_checks(void)
329{ 614{
330 int i; 615 int i;
@@ -342,4 +627,9 @@ void multiorder_checks(void)
342 multiorder_tag_tests(); 627 multiorder_tag_tests();
343 multiorder_iteration(); 628 multiorder_iteration();
344 multiorder_tagged_iteration(); 629 multiorder_tagged_iteration();
630 multiorder_join();
631 multiorder_split();
632 multiorder_account();
633
634 radix_tree_cpu_dead(0);
345} 635}