aboutsummaryrefslogtreecommitdiffstats
path: root/lib/genalloc.c
diff options
context:
space:
mode:
authorZhao Qiang <qiang.zhao@freescale.com>2015-11-29 21:48:52 -0500
committerScott Wood <scottwood@freescale.com>2015-12-22 18:10:17 -0500
commitde2dd0eb30af55d3893979d5641c50c7a8969c99 (patch)
treedcfd94f13f29b4ad8bcf32b2849876467aa3626c /lib/genalloc.c
parente9d764f803964a54ca7da4a67d124fe824ebd80a (diff)
genalloc:support memory-allocation with bytes-alignment to genalloc
Byte alignment is required to manage some special RAM, so add gen_pool_first_fit_align to genalloc. Also add gen_pool_alloc_algo to pass the algorithm explicitly, in case the user layer uses more than one algorithm, and to pass data to gen_pool_first_fit_align (gen_pool_alloc is modified to be a wrapper). Signed-off-by: Zhao Qiang <qiang.zhao@freescale.com> Signed-off-by: Scott Wood <scottwood@freescale.com>
Diffstat (limited to 'lib/genalloc.c')
-rw-r--r--lib/genalloc.c61
1 file changed, 55 insertions(+), 6 deletions(-)
diff --git a/lib/genalloc.c b/lib/genalloc.c
index 116a166b096f..b8cf89d9e17d 100644
--- a/lib/genalloc.c
+++ b/lib/genalloc.c
@@ -270,6 +270,25 @@ EXPORT_SYMBOL(gen_pool_destroy);
270 */ 270 */
271unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size) 271unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
272{ 272{
273 return gen_pool_alloc_algo(pool, size, pool->algo, pool->data);
274}
275EXPORT_SYMBOL(gen_pool_alloc);
276
277/**
278 * gen_pool_alloc_algo - allocate special memory from the pool
279 * @pool: pool to allocate from
280 * @size: number of bytes to allocate from the pool
281 * @algo: algorithm passed from caller
282 * @data: data passed to algorithm
283 *
284 * Allocate the requested number of bytes from the specified pool.
285 * Uses the pool allocation function (with first-fit algorithm by default).
286 * Can not be used in NMI handler on architectures without
287 * NMI-safe cmpxchg implementation.
288 */
289unsigned long gen_pool_alloc_algo(struct gen_pool *pool, size_t size,
290 genpool_algo_t algo, void *data)
291{
273 struct gen_pool_chunk *chunk; 292 struct gen_pool_chunk *chunk;
274 unsigned long addr = 0; 293 unsigned long addr = 0;
275 int order = pool->min_alloc_order; 294 int order = pool->min_alloc_order;
@@ -290,8 +309,8 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
290 309
291 end_bit = chunk_size(chunk) >> order; 310 end_bit = chunk_size(chunk) >> order;
292retry: 311retry:
293 start_bit = pool->algo(chunk->bits, end_bit, start_bit, nbits, 312 start_bit = algo(chunk->bits, end_bit, start_bit,
294 pool->data); 313 nbits, data, pool);
295 if (start_bit >= end_bit) 314 if (start_bit >= end_bit)
296 continue; 315 continue;
297 remain = bitmap_set_ll(chunk->bits, start_bit, nbits); 316 remain = bitmap_set_ll(chunk->bits, start_bit, nbits);
@@ -310,7 +329,7 @@ retry:
310 rcu_read_unlock(); 329 rcu_read_unlock();
311 return addr; 330 return addr;
312} 331}
313EXPORT_SYMBOL(gen_pool_alloc); 332EXPORT_SYMBOL(gen_pool_alloc_algo);
314 333
315/** 334/**
316 * gen_pool_dma_alloc - allocate special memory from the pool for DMA usage 335 * gen_pool_dma_alloc - allocate special memory from the pool for DMA usage
@@ -501,15 +520,42 @@ EXPORT_SYMBOL(gen_pool_set_algo);
501 * @start: The bitnumber to start searching at 520 * @start: The bitnumber to start searching at
502 * @nr: The number of zeroed bits we're looking for 521 * @nr: The number of zeroed bits we're looking for
503 * @data: additional data - unused 522 * @data: additional data - unused
523 * @pool: pool to find the fit region memory from
504 */ 524 */
505unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size, 525unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
506 unsigned long start, unsigned int nr, void *data) 526 unsigned long start, unsigned int nr, void *data,
527 struct gen_pool *pool)
507{ 528{
508 return bitmap_find_next_zero_area(map, size, start, nr, 0); 529 return bitmap_find_next_zero_area(map, size, start, nr, 0);
509} 530}
510EXPORT_SYMBOL(gen_pool_first_fit); 531EXPORT_SYMBOL(gen_pool_first_fit);
511 532
512/** 533/**
534 * gen_pool_first_fit_align - find the first available region
535 * of memory matching the size requirement (alignment constraint)
536 * @map: The address to base the search on
537 * @size: The bitmap size in bits
538 * @start: The bitnumber to start searching at
539 * @nr: The number of zeroed bits we're looking for
540 * @data: data for alignment
541 * @pool: pool to get order from
542 */
543unsigned long gen_pool_first_fit_align(unsigned long *map, unsigned long size,
544 unsigned long start, unsigned int nr, void *data,
545 struct gen_pool *pool)
546{
547 struct genpool_data_align *alignment;
548 unsigned long align_mask;
549 int order;
550
551 alignment = data;
552 order = pool->min_alloc_order;
553 align_mask = ((alignment->align + (1UL << order) - 1) >> order) - 1;
554 return bitmap_find_next_zero_area(map, size, start, nr, align_mask);
555}
556EXPORT_SYMBOL(gen_pool_first_fit_align);
557
558/**
513 * gen_pool_first_fit_order_align - find the first available region 559 * gen_pool_first_fit_order_align - find the first available region
514 * of memory matching the size requirement. The region will be aligned 560 * of memory matching the size requirement. The region will be aligned
515 * to the order of the size specified. 561 * to the order of the size specified.
@@ -518,10 +564,11 @@ EXPORT_SYMBOL(gen_pool_first_fit);
518 * @start: The bitnumber to start searching at 564 * @start: The bitnumber to start searching at
519 * @nr: The number of zeroed bits we're looking for 565 * @nr: The number of zeroed bits we're looking for
520 * @data: additional data - unused 566 * @data: additional data - unused
567 * @pool: pool to find the fit region memory from
521 */ 568 */
522unsigned long gen_pool_first_fit_order_align(unsigned long *map, 569unsigned long gen_pool_first_fit_order_align(unsigned long *map,
523 unsigned long size, unsigned long start, 570 unsigned long size, unsigned long start,
524 unsigned int nr, void *data) 571 unsigned int nr, void *data, struct gen_pool *pool)
525{ 572{
526 unsigned long align_mask = roundup_pow_of_two(nr) - 1; 573 unsigned long align_mask = roundup_pow_of_two(nr) - 1;
527 574
@@ -537,12 +584,14 @@ EXPORT_SYMBOL(gen_pool_first_fit_order_align);
537 * @start: The bitnumber to start searching at 584 * @start: The bitnumber to start searching at
538 * @nr: The number of zeroed bits we're looking for 585 * @nr: The number of zeroed bits we're looking for
539 * @data: additional data - unused 586 * @data: additional data - unused
587 * @pool: pool to find the fit region memory from
540 * 588 *
541 * Iterate over the bitmap to find the smallest free region 589 * Iterate over the bitmap to find the smallest free region
542 * which we can allocate the memory. 590 * which we can allocate the memory.
543 */ 591 */
544unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size, 592unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
545 unsigned long start, unsigned int nr, void *data) 593 unsigned long start, unsigned int nr, void *data,
594 struct gen_pool *pool)
546{ 595{
547 unsigned long start_bit = size; 596 unsigned long start_bit = size;
548 unsigned long len = size + 1; 597 unsigned long len = size + 1;