Diffstat (limited to 'tools/testing/radix-tree/iteration_check.c')

 tools/testing/radix-tree/iteration_check.c | 103 ++++++++++++-----------
 1 file changed, 51 insertions(+), 52 deletions(-)
diff --git a/tools/testing/radix-tree/iteration_check.c b/tools/testing/radix-tree/iteration_check.c
index d047327bb9ef..238db187aa15 100644
--- a/tools/testing/radix-tree/iteration_check.c
+++ b/tools/testing/radix-tree/iteration_check.c
@@ -1,5 +1,5 @@
 /*
- * iteration_check.c: test races having to do with radix tree iteration
+ * iteration_check.c: test races having to do with xarray iteration
  * Copyright (c) 2016 Intel Corporation
  * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
  *
@@ -12,7 +12,6 @@
  * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
  * more details.
  */
-#include <linux/radix-tree.h>
 #include <pthread.h>
 #include "test.h"
 
@@ -23,29 +22,44 @@
 
 static pthread_t threads[NUM_THREADS];
 static unsigned int seeds[3];
-static RADIX_TREE(tree, GFP_KERNEL);
+static DEFINE_XARRAY(array);
 static bool test_complete;
 static int max_order;
 
-/* relentlessly fill the tree with tagged entries */
+void my_item_insert(struct xarray *xa, unsigned long index)
+{
+	XA_STATE(xas, xa, index);
+	struct item *item = item_create(index, 0);
+	int order;
+
+retry:
+	xas_lock(&xas);
+	for (order = max_order; order >= 0; order--) {
+		xas_set_order(&xas, index, order);
+		item->order = order;
+		if (xas_find_conflict(&xas))
+			continue;
+		xas_store(&xas, item);
+		xas_set_mark(&xas, TAG);
+		break;
+	}
+	xas_unlock(&xas);
+	if (xas_nomem(&xas, GFP_KERNEL))
+		goto retry;
+	if (order < 0)
+		free(item);
+}
+
+/* relentlessly fill the array with tagged entries */
 static void *add_entries_fn(void *arg)
 {
 	rcu_register_thread();
 
 	while (!test_complete) {
 		unsigned long pgoff;
-		int order;
 
 		for (pgoff = 0; pgoff < MAX_IDX; pgoff++) {
-			xa_lock(&tree);
-			for (order = max_order; order >= 0; order--) {
-				if (item_insert_order(&tree, pgoff, order)
-						== 0) {
-					item_tag_set(&tree, pgoff, TAG);
-					break;
-				}
-			}
-			xa_unlock(&tree);
+			my_item_insert(&array, pgoff);
 		}
 	}
 
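The allocation dance in my_item_insert() is the standard XArray advanced-API
pattern: xas_store() may fail for lack of memory while the lock is held, and
xas_nomem() then allocates outside the lock and asks the caller to retry. A
minimal sketch of just that pattern, assuming a kernel (or test-shim) context;
store_entry() and sketch_xa are hypothetical names, not part of this test:

	#include <linux/xarray.h>

	static DEFINE_XARRAY(sketch_xa);

	static int store_entry(unsigned long index, void *entry)
	{
		XA_STATE(xas, &sketch_xa, index);

		do {
			xas_lock(&xas);
			xas_store(&xas, entry);	/* may set -ENOMEM on the xa_state */
			xas_unlock(&xas);
			/* allocates outside the lock; true means "try again" */
		} while (xas_nomem(&xas, GFP_KERNEL));

		return xas_error(&xas);
	}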
@@ -55,33 +69,25 @@ static void *add_entries_fn(void *arg)
 }
 
 /*
- * Iterate over the tagged entries, doing a radix_tree_iter_retry() as we find
- * things that have been removed and randomly resetting our iteration to the
- * next chunk with radix_tree_iter_resume().  Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over tagged entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *tagged_iteration_fn(void *arg)
 {
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, &array, 0);
+	void *entry;
 
 	rcu_register_thread();
 
 	while (!test_complete) {
+		xas_set(&xas, 0);
 		rcu_read_lock();
-		radix_tree_for_each_tagged(slot, &tree, &iter, 0, TAG) {
-			void *entry = radix_tree_deref_slot(slot);
-			if (unlikely(!entry))
+		xas_for_each_marked(&xas, entry, ULONG_MAX, TAG) {
+			if (xas_retry(&xas, entry))
 				continue;
 
-			if (radix_tree_deref_retry(entry)) {
-				slot = radix_tree_iter_retry(&iter);
-				continue;
-			}
-
 			if (rand_r(&seeds[0]) % 50 == 0) {
-				slot = radix_tree_iter_resume(slot, &iter);
+				xas_pause(&xas);
 				rcu_read_unlock();
 				rcu_barrier();
 				rcu_read_lock();
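Two xarray calls do the work of the old open-coded radix-tree sequence:
xas_retry() restarts the lookup when the iterator lands on a retry entry left
behind by a concurrent deleter, and xas_pause() records a resume point so the
RCU read lock can be dropped mid-walk. A sketch of the same shape in a generic
kernel context; walk_tagged() and process() are hypothetical, and XA_MARK_0
stands in for this test's TAG:

	static void walk_tagged(struct xarray *xa, void (*process)(void *))
	{
		XA_STATE(xas, xa, 0);
		void *entry;

		rcu_read_lock();
		xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_0) {
			if (xas_retry(&xas, entry))	/* raced with a deleter */
				continue;
			process(entry);
			if (need_resched()) {
				xas_pause(&xas);	/* remember where we were */
				rcu_read_unlock();
				cond_resched();
				rcu_read_lock();
			}
		}
		rcu_read_unlock();
	}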
@@ -96,33 +102,25 @@ static void *tagged_iteration_fn(void *arg)
 }
 
 /*
- * Iterate over the entries, doing a radix_tree_iter_retry() as we find things
- * that have been removed and randomly resetting our iteration to the next
- * chunk with radix_tree_iter_resume().  Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over the entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *untagged_iteration_fn(void *arg)
 {
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, &array, 0);
+	void *entry;
 
 	rcu_register_thread();
 
 	while (!test_complete) {
+		xas_set(&xas, 0);
 		rcu_read_lock();
-		radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-			void *entry = radix_tree_deref_slot(slot);
-			if (unlikely(!entry))
+		xas_for_each(&xas, entry, ULONG_MAX) {
+			if (xas_retry(&xas, entry))
 				continue;
 
-			if (radix_tree_deref_retry(entry)) {
-				slot = radix_tree_iter_retry(&iter);
-				continue;
-			}
-
 			if (rand_r(&seeds[1]) % 50 == 0) {
-				slot = radix_tree_iter_resume(slot, &iter);
+				xas_pause(&xas);
 				rcu_read_unlock();
 				rcu_barrier();
 				rcu_read_lock();
@@ -137,7 +135,7 @@ static void *untagged_iteration_fn(void *arg)
 }
 
 /*
- * Randomly remove entries to help induce radix_tree_iter_retry() calls in the
+ * Randomly remove entries to help induce retries in the
  * two iteration functions.
  */
 static void *remove_entries_fn(void *arg)
@@ -146,12 +144,13 @@ static void *remove_entries_fn(void *arg)
 
 	while (!test_complete) {
 		int pgoff;
+		struct item *item;
 
 		pgoff = rand_r(&seeds[2]) % MAX_IDX;
 
-		xa_lock(&tree);
-		item_delete(&tree, pgoff);
-		xa_unlock(&tree);
+		item = xa_erase(&array, pgoff);
+		if (item)
+			item_free(item, pgoff);
 	}
 
 	rcu_unregister_thread();
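Note how xa_erase() collapses the old lock/delete/unlock triple: it takes the
xa_lock internally and hands back whatever entry was stored, so the object can
be freed without a second lookup. A minimal sketch, assuming entries were
allocated with malloc(); erase_and_free() is a hypothetical helper:

	static void erase_and_free(struct xarray *xa, unsigned long index)
	{
		void *entry = xa_erase(xa, index);	/* NULL if index was empty */

		if (entry)
			free(entry);
	}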
@@ -164,7 +163,7 @@ static void *tag_entries_fn(void *arg)
 	rcu_register_thread();
 
 	while (!test_complete) {
-		tag_tagged_items(&tree, 0, MAX_IDX, 10, TAG, NEW_TAG);
+		tag_tagged_items(&array, 0, MAX_IDX, 10, TAG, NEW_TAG);
 	}
 	rcu_unregister_thread();
 	return NULL;
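tag_tagged_items() is a test-suite helper; its batch argument (10 here) bounds
how long the lock is held per pass. A sketch of what such a helper might look
like without the batching, copying one mark onto another under the lock;
copy_marks() is illustrative, not the suite's actual implementation:

	static unsigned long copy_marks(struct xarray *xa, unsigned long start,
			unsigned long end, xa_mark_t old_mark, xa_mark_t new_mark)
	{
		XA_STATE(xas, xa, start);
		void *entry;
		unsigned long count = 0;

		xas_lock(&xas);
		xas_for_each_marked(&xas, entry, end, old_mark) {
			xas_set_mark(&xas, new_mark);
			count++;
		}
		xas_unlock(&xas);
		return count;
	}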
@@ -215,5 +214,5 @@ void iteration_test(unsigned order, unsigned test_duration)
 		}
 	}
 
-	item_kill_tree(&tree);
+	item_kill_tree(&array);
 }
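item_kill_tree() is the suite's teardown helper. An equivalent drain using
only the public xa_* API would look roughly like this (a sketch; kill_all()
is a hypothetical name, and xa_for_each() is safe to use while erasing the
entry it just returned):

	#include <assert.h>

	static void kill_all(struct xarray *xa)
	{
		void *entry;
		unsigned long index;

		xa_for_each(xa, index, entry) {
			xa_erase(xa, index);
			free(entry);
		}
		assert(xa_empty(xa));
	}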