aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorZhao Qiang <qiang.zhao@freescale.com>2015-11-29 21:48:52 -0500
committerScott Wood <scottwood@freescale.com>2015-12-22 18:10:17 -0500
commitde2dd0eb30af55d3893979d5641c50c7a8969c99 (patch)
treedcfd94f13f29b4ad8bcf32b2849876467aa3626c
parente9d764f803964a54ca7da4a67d124fe824ebd80a (diff)
genalloc:support memory-allocation with bytes-alignment to genalloc
Bytes alignment is required to manage some special RAM, so add gen_pool_first_fit_align to genalloc; meanwhile, add gen_pool_alloc_algo to pass the algorithm in case the user layer uses more than one algorithm, and pass data to gen_pool_first_fit_align (modify gen_pool_alloc as a wrapper). Signed-off-by: Zhao Qiang <qiang.zhao@freescale.com> Signed-off-by: Scott Wood <scottwood@freescale.com>
-rw-r--r--include/linux/genalloc.h27
-rw-r--r--lib/genalloc.c61
2 files changed, 78 insertions, 10 deletions
diff --git a/include/linux/genalloc.h b/include/linux/genalloc.h
index 7ff168d06967..3c676ce46ee0 100644
--- a/include/linux/genalloc.h
+++ b/include/linux/genalloc.h
@@ -30,10 +30,12 @@
30#ifndef __GENALLOC_H__ 30#ifndef __GENALLOC_H__
31#define __GENALLOC_H__ 31#define __GENALLOC_H__
32 32
33#include <linux/types.h>
33#include <linux/spinlock_types.h> 34#include <linux/spinlock_types.h>
34 35
35struct device; 36struct device;
36struct device_node; 37struct device_node;
38struct gen_pool;
37 39
38/** 40/**
39 * Allocation callback function type definition 41 * Allocation callback function type definition
@@ -47,7 +49,7 @@ typedef unsigned long (*genpool_algo_t)(unsigned long *map,
47 unsigned long size, 49 unsigned long size,
48 unsigned long start, 50 unsigned long start,
49 unsigned int nr, 51 unsigned int nr,
50 void *data); 52 void *data, struct gen_pool *pool);
51 53
52/* 54/*
53 * General purpose special memory pool descriptor. 55 * General purpose special memory pool descriptor.
@@ -75,6 +77,13 @@ struct gen_pool_chunk {
75 unsigned long bits[0]; /* bitmap for allocating memory chunk */ 77 unsigned long bits[0]; /* bitmap for allocating memory chunk */
76}; 78};
77 79
80/*
81 * gen_pool data descriptor for gen_pool_first_fit_align.
82 */
83struct genpool_data_align {
84 int align; /* alignment by bytes for starting address */
85};
86
78extern struct gen_pool *gen_pool_create(int, int); 87extern struct gen_pool *gen_pool_create(int, int);
79extern phys_addr_t gen_pool_virt_to_phys(struct gen_pool *pool, unsigned long); 88extern phys_addr_t gen_pool_virt_to_phys(struct gen_pool *pool, unsigned long);
80extern int gen_pool_add_virt(struct gen_pool *, unsigned long, phys_addr_t, 89extern int gen_pool_add_virt(struct gen_pool *, unsigned long, phys_addr_t,
@@ -98,6 +107,8 @@ static inline int gen_pool_add(struct gen_pool *pool, unsigned long addr,
98} 107}
99extern void gen_pool_destroy(struct gen_pool *); 108extern void gen_pool_destroy(struct gen_pool *);
100extern unsigned long gen_pool_alloc(struct gen_pool *, size_t); 109extern unsigned long gen_pool_alloc(struct gen_pool *, size_t);
110extern unsigned long gen_pool_alloc_algo(struct gen_pool *, size_t,
111 genpool_algo_t algo, void *data);
101extern void *gen_pool_dma_alloc(struct gen_pool *pool, size_t size, 112extern void *gen_pool_dma_alloc(struct gen_pool *pool, size_t size,
102 dma_addr_t *dma); 113 dma_addr_t *dma);
103extern void gen_pool_free(struct gen_pool *, unsigned long, size_t); 114extern void gen_pool_free(struct gen_pool *, unsigned long, size_t);
@@ -110,14 +121,22 @@ extern void gen_pool_set_algo(struct gen_pool *pool, genpool_algo_t algo,
110 void *data); 121 void *data);
111 122
112extern unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size, 123extern unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
113 unsigned long start, unsigned int nr, void *data); 124 unsigned long start, unsigned int nr, void *data,
125 struct gen_pool *pool);
126
127extern unsigned long gen_pool_first_fit_align(unsigned long *map,
128 unsigned long size, unsigned long start, unsigned int nr,
129 void *data, struct gen_pool *pool);
130
114 131
115extern unsigned long gen_pool_first_fit_order_align(unsigned long *map, 132extern unsigned long gen_pool_first_fit_order_align(unsigned long *map,
116 unsigned long size, unsigned long start, unsigned int nr, 133 unsigned long size, unsigned long start, unsigned int nr,
117 void *data); 134 void *data, struct gen_pool *pool);
118 135
119extern unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size, 136extern unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
120 unsigned long start, unsigned int nr, void *data); 137 unsigned long start, unsigned int nr, void *data,
138 struct gen_pool *pool);
139
121 140
122extern struct gen_pool *devm_gen_pool_create(struct device *dev, 141extern struct gen_pool *devm_gen_pool_create(struct device *dev,
123 int min_alloc_order, int nid, const char *name); 142 int min_alloc_order, int nid, const char *name);
diff --git a/lib/genalloc.c b/lib/genalloc.c
index 116a166b096f..b8cf89d9e17d 100644
--- a/lib/genalloc.c
+++ b/lib/genalloc.c
@@ -270,6 +270,25 @@ EXPORT_SYMBOL(gen_pool_destroy);
270 */ 270 */
271unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size) 271unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
272{ 272{
273 return gen_pool_alloc_algo(pool, size, pool->algo, pool->data);
274}
275EXPORT_SYMBOL(gen_pool_alloc);
276
277/**
278 * gen_pool_alloc_algo - allocate special memory from the pool
279 * @pool: pool to allocate from
280 * @size: number of bytes to allocate from the pool
281 * @algo: algorithm passed from caller
282 * @data: data passed to algorithm
283 *
284 * Allocate the requested number of bytes from the specified pool.
285 * Uses the pool allocation function (with first-fit algorithm by default).
286 * Can not be used in NMI handler on architectures without
287 * NMI-safe cmpxchg implementation.
288 */
289unsigned long gen_pool_alloc_algo(struct gen_pool *pool, size_t size,
290 genpool_algo_t algo, void *data)
291{
273 struct gen_pool_chunk *chunk; 292 struct gen_pool_chunk *chunk;
274 unsigned long addr = 0; 293 unsigned long addr = 0;
275 int order = pool->min_alloc_order; 294 int order = pool->min_alloc_order;
@@ -290,8 +309,8 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
290 309
291 end_bit = chunk_size(chunk) >> order; 310 end_bit = chunk_size(chunk) >> order;
292retry: 311retry:
293 start_bit = pool->algo(chunk->bits, end_bit, start_bit, nbits, 312 start_bit = algo(chunk->bits, end_bit, start_bit,
294 pool->data); 313 nbits, data, pool);
295 if (start_bit >= end_bit) 314 if (start_bit >= end_bit)
296 continue; 315 continue;
297 remain = bitmap_set_ll(chunk->bits, start_bit, nbits); 316 remain = bitmap_set_ll(chunk->bits, start_bit, nbits);
@@ -310,7 +329,7 @@ retry:
310 rcu_read_unlock(); 329 rcu_read_unlock();
311 return addr; 330 return addr;
312} 331}
313EXPORT_SYMBOL(gen_pool_alloc); 332EXPORT_SYMBOL(gen_pool_alloc_algo);
314 333
315/** 334/**
316 * gen_pool_dma_alloc - allocate special memory from the pool for DMA usage 335 * gen_pool_dma_alloc - allocate special memory from the pool for DMA usage
@@ -501,15 +520,42 @@ EXPORT_SYMBOL(gen_pool_set_algo);
501 * @start: The bitnumber to start searching at 520 * @start: The bitnumber to start searching at
502 * @nr: The number of zeroed bits we're looking for 521 * @nr: The number of zeroed bits we're looking for
503 * @data: additional data - unused 522 * @data: additional data - unused
523 * @pool: pool to find the fit region memory from
504 */ 524 */
505unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size, 525unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
506 unsigned long start, unsigned int nr, void *data) 526 unsigned long start, unsigned int nr, void *data,
527 struct gen_pool *pool)
507{ 528{
508 return bitmap_find_next_zero_area(map, size, start, nr, 0); 529 return bitmap_find_next_zero_area(map, size, start, nr, 0);
509} 530}
510EXPORT_SYMBOL(gen_pool_first_fit); 531EXPORT_SYMBOL(gen_pool_first_fit);
511 532
512/** 533/**
534 * gen_pool_first_fit_align - find the first available region
535 * of memory matching the size requirement (alignment constraint)
536 * @map: The address to base the search on
537 * @size: The bitmap size in bits
538 * @start: The bitnumber to start searching at
539 * @nr: The number of zeroed bits we're looking for
540 * @data: data for alignment
541 * @pool: pool to get order from
542 */
543unsigned long gen_pool_first_fit_align(unsigned long *map, unsigned long size,
544 unsigned long start, unsigned int nr, void *data,
545 struct gen_pool *pool)
546{
547 struct genpool_data_align *alignment;
548 unsigned long align_mask;
549 int order;
550
551 alignment = data;
552 order = pool->min_alloc_order;
553 align_mask = ((alignment->align + (1UL << order) - 1) >> order) - 1;
554 return bitmap_find_next_zero_area(map, size, start, nr, align_mask);
555}
556EXPORT_SYMBOL(gen_pool_first_fit_align);
557
558/**
513 * gen_pool_first_fit_order_align - find the first available region 559 * gen_pool_first_fit_order_align - find the first available region
514 * of memory matching the size requirement. The region will be aligned 560 * of memory matching the size requirement. The region will be aligned
515 * to the order of the size specified. 561 * to the order of the size specified.
@@ -518,10 +564,11 @@ EXPORT_SYMBOL(gen_pool_first_fit);
518 * @start: The bitnumber to start searching at 564 * @start: The bitnumber to start searching at
519 * @nr: The number of zeroed bits we're looking for 565 * @nr: The number of zeroed bits we're looking for
520 * @data: additional data - unused 566 * @data: additional data - unused
567 * @pool: pool to find the fit region memory from
521 */ 568 */
522unsigned long gen_pool_first_fit_order_align(unsigned long *map, 569unsigned long gen_pool_first_fit_order_align(unsigned long *map,
523 unsigned long size, unsigned long start, 570 unsigned long size, unsigned long start,
524 unsigned int nr, void *data) 571 unsigned int nr, void *data, struct gen_pool *pool)
525{ 572{
526 unsigned long align_mask = roundup_pow_of_two(nr) - 1; 573 unsigned long align_mask = roundup_pow_of_two(nr) - 1;
527 574
@@ -537,12 +584,14 @@ EXPORT_SYMBOL(gen_pool_first_fit_order_align);
537 * @start: The bitnumber to start searching at 584 * @start: The bitnumber to start searching at
538 * @nr: The number of zeroed bits we're looking for 585 * @nr: The number of zeroed bits we're looking for
539 * @data: additional data - unused 586 * @data: additional data - unused
587 * @pool: pool to find the fit region memory from
540 * 588 *
541 * Iterate over the bitmap to find the smallest free region 589 * Iterate over the bitmap to find the smallest free region
542 * which we can allocate the memory. 590 * which we can allocate the memory.
543 */ 591 */
544unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size, 592unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
545 unsigned long start, unsigned int nr, void *data) 593 unsigned long start, unsigned int nr, void *data,
594 struct gen_pool *pool)
546{ 595{
547 unsigned long start_bit = size; 596 unsigned long start_bit = size;
548 unsigned long len = size + 1; 597 unsigned long len = size + 1;