Diffstat (limited to 'lib/radix-tree.c')
 lib/radix-tree.c | 28 +++++++++++++++++-----------
 1 file changed, 17 insertions(+), 11 deletions(-)
diff --git a/lib/radix-tree.c b/lib/radix-tree.c
index 061550de77bc..f9ebe1c82060 100644
--- a/lib/radix-tree.c
+++ b/lib/radix-tree.c
@@ -65,7 +65,8 @@ static struct kmem_cache *radix_tree_node_cachep;
  */
 struct radix_tree_preload {
 	int nr;
-	struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE];
+	/* nodes->private_data points to next preallocated node */
+	struct radix_tree_node *nodes;
 };
 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
 
@@ -197,8 +198,9 @@ radix_tree_node_alloc(struct radix_tree_root *root)
 	 */
 	rtp = this_cpu_ptr(&radix_tree_preloads);
 	if (rtp->nr) {
-		ret = rtp->nodes[rtp->nr - 1];
-		rtp->nodes[rtp->nr - 1] = NULL;
+		ret = rtp->nodes;
+		rtp->nodes = ret->private_data;
+		ret->private_data = NULL;
 		rtp->nr--;
 	}
 	/*
@@ -257,17 +259,20 @@ static int __radix_tree_preload(gfp_t gfp_mask)
 
 	preempt_disable();
 	rtp = this_cpu_ptr(&radix_tree_preloads);
-	while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
+	while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
 		preempt_enable();
 		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
 		if (node == NULL)
 			goto out;
 		preempt_disable();
 		rtp = this_cpu_ptr(&radix_tree_preloads);
-		if (rtp->nr < ARRAY_SIZE(rtp->nodes))
-			rtp->nodes[rtp->nr++] = node;
-		else
+		if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
+			node->private_data = rtp->nodes;
+			rtp->nodes = node;
+			rtp->nr++;
+		} else {
 			kmem_cache_free(radix_tree_node_cachep, node);
+		}
 	}
 	ret = 0;
 out:
@@ -1463,15 +1468,16 @@ static int radix_tree_callback(struct notifier_block *nfb,
 {
 	int cpu = (long)hcpu;
 	struct radix_tree_preload *rtp;
+	struct radix_tree_node *node;
 
 	/* Free per-cpu pool of perloaded nodes */
 	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
 		rtp = &per_cpu(radix_tree_preloads, cpu);
 		while (rtp->nr) {
-			kmem_cache_free(radix_tree_node_cachep,
-					rtp->nodes[rtp->nr-1]);
-			rtp->nodes[rtp->nr-1] = NULL;
+			node = rtp->nodes;
+			rtp->nodes = node->private_data;
+			kmem_cache_free(radix_tree_node_cachep, node);
 			rtp->nr--;
 		}
 	}
 	return NOTIFY_OK;
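
For context, the patch above replaces the fixed per-CPU array of preallocated nodes with a singly linked list threaded through each free node's ->private_data pointer. The following is a minimal userspace sketch of that pooling pattern, under assumed names (struct node, struct preload_pool, pool_push, pool_pop) that do not appear in lib/radix-tree.c; it is an illustration of the technique, not the kernel implementation.

/*
 * Sketch only: a free-node pool kept as a singly linked list that is
 * threaded through a spare pointer inside each pooled node, the same
 * idea the patch applies via node->private_data.
 */
#include <stdio.h>
#include <stdlib.h>

struct node {
	void *private_data;	/* links to the next free node while pooled */
	/* payload fields would live here */
};

struct preload_pool {
	int nr;			/* number of nodes currently pooled */
	struct node *nodes;	/* head of the free list */
};

/* Push a freshly allocated node onto the pool
 * (mirrors the add path in __radix_tree_preload). */
static void pool_push(struct preload_pool *pool, struct node *n)
{
	n->private_data = pool->nodes;
	pool->nodes = n;
	pool->nr++;
}

/* Pop one preallocated node, or NULL if the pool is empty
 * (mirrors the fast path in radix_tree_node_alloc). */
static struct node *pool_pop(struct preload_pool *pool)
{
	struct node *n = pool->nodes;

	if (!n)
		return NULL;
	pool->nodes = n->private_data;
	n->private_data = NULL;
	pool->nr--;
	return n;
}

int main(void)
{
	struct preload_pool pool = { 0, NULL };
	struct node *n;

	/* Preload a few nodes, then drain the pool again. */
	for (int i = 0; i < 3; i++)
		pool_push(&pool, calloc(1, sizeof(struct node)));

	while ((n = pool_pop(&pool)))
		free(n);

	printf("pool drained, nr = %d\n", pool.nr);
	return 0;
}

In the patch itself the equivalent push and pop operate on a per-CPU radix_tree_preload instance with preemption disabled (this_cpu_ptr under preempt_disable()), so the list needs no additional locking.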