 include/linux/genalloc.h | 27
 lib/genalloc.c           | 88
 2 files changed, 111 insertions(+), 4 deletions(-)
diff --git a/include/linux/genalloc.h b/include/linux/genalloc.h
index 5e98eeb2af3b..dd7c569aacad 100644
--- a/include/linux/genalloc.h
+++ b/include/linux/genalloc.h
@@ -29,6 +29,20 @@
 
 #ifndef __GENALLOC_H__
 #define __GENALLOC_H__
+/**
+ * Allocation callback function type definition
+ * @map: Pointer to bitmap
+ * @size: The bitmap size in bits
+ * @start: The bitnumber to start searching at
+ * @nr: The number of zeroed bits we're looking for
+ * @data: optional additional data used by @genpool_algo_t
+ */
+typedef unsigned long (*genpool_algo_t)(unsigned long *map,
+			unsigned long size,
+			unsigned long start,
+			unsigned int nr,
+			void *data);
+
 /*
  * General purpose special memory pool descriptor.
  */
@@ -36,6 +50,9 @@ struct gen_pool {
 	spinlock_t lock;
 	struct list_head chunks;	/* list of chunks in this pool */
 	int min_alloc_order;		/* minimum allocation order */
+
+	genpool_algo_t algo;		/* allocation function */
+	void *data;
 };
 
 /*
@@ -78,4 +95,14 @@ extern void gen_pool_for_each_chunk(struct gen_pool *,
 	void (*)(struct gen_pool *, struct gen_pool_chunk *, void *), void *);
 extern size_t gen_pool_avail(struct gen_pool *);
 extern size_t gen_pool_size(struct gen_pool *);
+
+extern void gen_pool_set_algo(struct gen_pool *pool, genpool_algo_t algo,
+		void *data);
+
+extern unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
+		unsigned long start, unsigned int nr, void *data);
+
+extern unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
+		unsigned long start, unsigned int nr, void *data);
+
 #endif /* __GENALLOC_H__ */
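
For context, here is a minimal usage sketch of the interface added above. It is not part of the patch; the function name example_setup_pool, the pool geometry and the error handling are illustrative only. A pool starts out with the default first-fit policy set by gen_pool_create(), and gen_pool_set_algo() swaps in the exported gen_pool_best_fit (or any other genpool_algo_t):

#include <linux/genalloc.h>
#include <linux/errno.h>

/* Illustrative only: base/size would normally come from the driver's resources */
static struct gen_pool *example_pool;

static int example_setup_pool(unsigned long base, size_t size)
{
	int ret;

	/* min_alloc_order = 6: allocations are managed in 64-byte granules */
	example_pool = gen_pool_create(6, -1);
	if (!example_pool)
		return -ENOMEM;

	ret = gen_pool_add(example_pool, base, size, -1);
	if (ret) {
		gen_pool_destroy(example_pool);
		return ret;
	}

	/* Swap the default first-fit policy for the new best-fit helper */
	gen_pool_set_algo(example_pool, gen_pool_best_fit, NULL);

	return 0;
}

Passing NULL as @algo restores the first-fit default, and @data is handed unchanged to the callback on every allocation.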
diff --git a/lib/genalloc.c b/lib/genalloc.c
index 6bc04aab6ec7..ca208a92628c 100644
--- a/lib/genalloc.c
+++ b/lib/genalloc.c
@@ -152,6 +152,8 @@ struct gen_pool *gen_pool_create(int min_alloc_order, int nid)
 		spin_lock_init(&pool->lock);
 		INIT_LIST_HEAD(&pool->chunks);
 		pool->min_alloc_order = min_alloc_order;
+		pool->algo = gen_pool_first_fit;
+		pool->data = NULL;
 	}
 	return pool;
 }
@@ -255,8 +257,9 @@ EXPORT_SYMBOL(gen_pool_destroy);
  * @size: number of bytes to allocate from the pool
  *
  * Allocate the requested number of bytes from the specified pool.
- * Uses a first-fit algorithm. Can not be used in NMI handler on
- * architectures without NMI-safe cmpxchg implementation.
+ * Uses the pool allocation function (with first-fit algorithm by default).
+ * Can not be used in NMI handler on architectures without
+ * NMI-safe cmpxchg implementation.
  */
 unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
 {
@@ -280,8 +283,8 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
 
 		end_bit = (chunk->end_addr - chunk->start_addr) >> order;
 retry:
-		start_bit = bitmap_find_next_zero_area(chunk->bits, end_bit,
-						       start_bit, nbits, 0);
+		start_bit = pool->algo(chunk->bits, end_bit, start_bit, nbits,
+				pool->data);
 		if (start_bit >= end_bit)
 			continue;
 		remain = bitmap_set_ll(chunk->bits, start_bit, nbits);
@@ -400,3 +403,80 @@ size_t gen_pool_size(struct gen_pool *pool)
 	return size;
 }
 EXPORT_SYMBOL_GPL(gen_pool_size);
+
+/**
+ * gen_pool_set_algo - set the allocation algorithm
+ * @pool: pool to change allocation algorithm
+ * @algo: custom algorithm function
+ * @data: additional data used by @algo
+ *
+ * Call @algo for each memory allocation in the pool.
+ * If @algo is NULL use gen_pool_first_fit as default
+ * memory allocation function.
+ */
+void gen_pool_set_algo(struct gen_pool *pool, genpool_algo_t algo, void *data)
+{
+	rcu_read_lock();
+
+	pool->algo = algo;
+	if (!pool->algo)
+		pool->algo = gen_pool_first_fit;
+
+	pool->data = data;
+
+	rcu_read_unlock();
+}
+EXPORT_SYMBOL(gen_pool_set_algo);
+
+/**
+ * gen_pool_first_fit - find the first available region
+ * of memory matching the size requirement (no alignment constraint)
+ * @map: The address to base the search on
+ * @size: The bitmap size in bits
+ * @start: The bitnumber to start searching at
+ * @nr: The number of zeroed bits we're looking for
+ * @data: additional data - unused
+ */
+unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
+		unsigned long start, unsigned int nr, void *data)
+{
+	return bitmap_find_next_zero_area(map, size, start, nr, 0);
+}
+EXPORT_SYMBOL(gen_pool_first_fit);
+
+/**
+ * gen_pool_best_fit - find the best fitting region of memory
+ * matching the size requirement (no alignment constraint)
+ * @map: The address to base the search on
+ * @size: The bitmap size in bits
+ * @start: The bitnumber to start searching at
+ * @nr: The number of zeroed bits we're looking for
+ * @data: additional data - unused
+ *
+ * Iterate over the bitmap to find the smallest free region
+ * in which we can allocate the memory.
+ */
+unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
+		unsigned long start, unsigned int nr, void *data)
+{
+	unsigned long start_bit = size;
+	unsigned long len = size + 1;
+	unsigned long index;
+
+	index = bitmap_find_next_zero_area(map, size, start, nr, 0);
+
+	while (index < size) {
+		int next_bit = find_next_bit(map, size, index + nr);
+		if ((next_bit - index) < len) {
+			len = next_bit - index;
+			start_bit = index;
+			if (len == nr)
+				return start_bit;
+		}
+		index = bitmap_find_next_zero_area(map, size,
+						   next_bit + 1, nr, 0);
+	}
+
+	return start_bit;
+}
+EXPORT_SYMBOL(gen_pool_best_fit);
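
Any caller can also supply its own genpool_algo_t instead of the two helpers exported by this patch. The sketch below is hypothetical (struct example_align_data, example_align_fit and example_use_align_fit are made-up names, and the 4 KiB alignment is just an example): a first-fit variant that honours a power-of-two alignment carried in @data, built on the same bitmap_find_next_zero_area() helper with a non-zero align mask. The alignment is expressed in units of the pool's min_alloc_order and is relative to the start of each chunk, so it only translates into an address alignment if the chunk base is itself suitably aligned.

#include <linux/genalloc.h>
#include <linux/bitmap.h>

/* Illustrative private data for the callback, passed via gen_pool_set_algo() */
struct example_align_data {
	unsigned long align_mask;	/* (alignment >> min_alloc_order) - 1 */
};

static unsigned long example_align_fit(unsigned long *map, unsigned long size,
		unsigned long start, unsigned int nr, void *data)
{
	struct example_align_data *ad = data;

	/* Same search as gen_pool_first_fit, but constrained by an align mask */
	return bitmap_find_next_zero_area(map, size, start, nr,
					  ad ? ad->align_mask : 0);
}

/* E.g. 4 KiB alignment for a pool created with min_alloc_order = 6 (64 bytes) */
static struct example_align_data example_align = {
	.align_mask = (4096 >> 6) - 1,
};

static void example_use_align_fit(struct gen_pool *pool)
{
	gen_pool_set_algo(pool, example_align_fit, &example_align);
}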