author     Deepak Nibade <dnibade@nvidia.com>                  2017-04-19 02:59:53 -0400
committer  mobile promotions <svcmobile_promotions@nvidia.com> 2017-04-19 15:15:56 -0400
commit     78fe154ff761bd1dff6db96e90cd4e23185574e2 (patch)
tree       b91ea9600dd52f56322900161c7d2cb9f27314e5 /drivers/gpu/nvgpu/common
parent     afe12a49f7c61b5725113e2b9d2a5121d7913383 (diff)
gpu: nvgpu: use nvgpu list for page allocator
Use nvgpu list APIs instead of linux list APIs for page allocator lists

Jira NVGPU-13

Change-Id: I3ee64a5cdc2ced4ca9c4ba7ad6271915a66d90f5
Signed-off-by: Deepak Nibade <dnibade@nvidia.com>
Reviewed-on: http://git-master/r/1462076
Reviewed-by: Automatic_Commit_Validation_User
Reviewed-by: svccoveritychecker <svccoveritychecker@nvidia.com>
GVS: Gerrit_Virtual_Submit
Reviewed-by: Terje Bergstrom <tbergstrom@nvidia.com>
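Note: most of the conversion in the diff below is a 1:1 substitution (list_add -> nvgpu_list_add, list_del_init -> nvgpu_list_del, INIT_LIST_HEAD -> nvgpu_init_list_node), but nvgpu_list_first_entry() is passed the bare type name page_alloc_slab_page rather than struct page_alloc_slab_page. That calling convention suggests the macro recovers the containing structure through a per-type <type>_from_<member>() helper instead of Linux's container_of()-based list_first_entry(). The sketch below is a self-contained, user-space approximation of that convention, not the real <nvgpu/list.h>: the nvgpu_list_node layout, the nvgpu_list_add() semantics, and the page_alloc_slab_page_from_list_entry() helper name are assumptions modeled on how the macros are invoked in this diff.

#include <stddef.h>
#include <stdio.h>

/* Minimal doubly-linked list node, assumed to mirror the nvgpu wrapper. */
struct nvgpu_list_node {
        struct nvgpu_list_node *prev;
        struct nvgpu_list_node *next;
};

/* Self-referencing init, matching what nvgpu_init_list_node() replaces
 * (INIT_LIST_HEAD) in the diff. */
static inline void nvgpu_init_list_node(struct nvgpu_list_node *node)
{
        node->prev = node;
        node->next = node;
}

/* Insert right after the head, like the Linux list_add() it replaces. */
static inline void nvgpu_list_add(struct nvgpu_list_node *node,
                                  struct nvgpu_list_node *head)
{
        node->next = head->next;
        node->prev = head;
        head->next->prev = node;
        head->next = node;
}

/* Assumed expansion: the macro pastes "<type>_from_<member>()", which is why
 * the diff passes the bare type name instead of "struct page_alloc_slab_page". */
#define nvgpu_list_first_entry(head, type, member) \
        type##_from_##member((head)->next)

/* Toy stand-in for the allocator's slab page. */
struct page_alloc_slab_page {
        unsigned int nr_objects;
        struct nvgpu_list_node list_entry;
};

/* Per-type conversion helper the macro expects (hypothetical name, following
 * the pattern implied by the macro expansion above). */
static inline struct page_alloc_slab_page *
page_alloc_slab_page_from_list_entry(struct nvgpu_list_node *node)
{
        return (struct page_alloc_slab_page *)
                ((char *)node - offsetof(struct page_alloc_slab_page, list_entry));
}

int main(void)
{
        struct nvgpu_list_node partial;
        struct page_alloc_slab_page page = { .nr_objects = 4 };

        nvgpu_init_list_node(&partial);
        nvgpu_init_list_node(&page.list_entry);
        nvgpu_list_add(&page.list_entry, &partial);

        /* Mirrors the __do_slab_alloc() call site in the diff. */
        struct page_alloc_slab_page *first =
                nvgpu_list_first_entry(&partial, page_alloc_slab_page, list_entry);

        printf("first slab page holds %u objects\n", first->nr_objects);
        return 0;
}

Keeping the list primitives behind nvgpu wrappers like this lets common allocator code avoid a hard dependency on Linux's <linux/list.h>, which appears to be the point of the NVGPU-13 abstraction work referenced above.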
Diffstat (limited to 'drivers/gpu/nvgpu/common')
-rw-r--r--   drivers/gpu/nvgpu/common/mm/page_allocator.c   34
1 file changed, 17 insertions(+), 17 deletions(-)
diff --git a/drivers/gpu/nvgpu/common/mm/page_allocator.c b/drivers/gpu/nvgpu/common/mm/page_allocator.c
index 1b6a2092..11d4ca73 100644
--- a/drivers/gpu/nvgpu/common/mm/page_allocator.c
+++ b/drivers/gpu/nvgpu/common/mm/page_allocator.c
@@ -34,7 +34,7 @@ static inline void add_slab_page_to_empty(struct page_alloc_slab *slab,
                                            struct page_alloc_slab_page *page)
 {
         BUG_ON(page->state != SP_NONE);
-        list_add(&page->list_entry, &slab->empty);
+        nvgpu_list_add(&page->list_entry, &slab->empty);
         slab->nr_empty++;
         page->state = SP_EMPTY;
 }
@@ -42,7 +42,7 @@ static inline void add_slab_page_to_partial(struct page_alloc_slab *slab,
                                              struct page_alloc_slab_page *page)
 {
         BUG_ON(page->state != SP_NONE);
-        list_add(&page->list_entry, &slab->partial);
+        nvgpu_list_add(&page->list_entry, &slab->partial);
         slab->nr_partial++;
         page->state = SP_PARTIAL;
 }
@@ -50,7 +50,7 @@ static inline void add_slab_page_to_full(struct page_alloc_slab *slab,
                                           struct page_alloc_slab_page *page)
 {
         BUG_ON(page->state != SP_NONE);
-        list_add(&page->list_entry, &slab->full);
+        nvgpu_list_add(&page->list_entry, &slab->full);
         slab->nr_full++;
         page->state = SP_FULL;
 }
@@ -58,21 +58,21 @@ static inline void add_slab_page_to_full(struct page_alloc_slab *slab,
 static inline void del_slab_page_from_empty(struct page_alloc_slab *slab,
                                             struct page_alloc_slab_page *page)
 {
-        list_del_init(&page->list_entry);
+        nvgpu_list_del(&page->list_entry);
         slab->nr_empty--;
         page->state = SP_NONE;
 }
 static inline void del_slab_page_from_partial(struct page_alloc_slab *slab,
                                               struct page_alloc_slab_page *page)
 {
-        list_del_init(&page->list_entry);
+        nvgpu_list_del(&page->list_entry);
         slab->nr_partial--;
         page->state = SP_NONE;
 }
 static inline void del_slab_page_from_full(struct page_alloc_slab *slab,
                                            struct page_alloc_slab_page *page)
 {
-        list_del_init(&page->list_entry);
+        nvgpu_list_del(&page->list_entry);
         slab->nr_full--;
         page->state = SP_NONE;
 }
@@ -197,7 +197,7 @@ static struct page_alloc_slab_page *alloc_slab_page(
                 return ERR_PTR(-ENOMEM);
         }
 
-        INIT_LIST_HEAD(&slab_page->list_entry);
+        nvgpu_init_list_node(&slab_page->list_entry);
         slab_page->slab_size = slab->slab_size;
         slab_page->nr_objects = (u32)a->page_size / slab->slab_size;
         slab_page->nr_objects_alloced = 0;
@@ -244,14 +244,14 @@ static int __do_slab_alloc(struct nvgpu_page_allocator *a,
          * readily available. Take the slab_page out of what ever list it
          * was in since it may be put back into a different list later.
          */
-        if (!list_empty(&slab->partial)) {
-                slab_page = list_first_entry(&slab->partial,
-                                             struct page_alloc_slab_page,
+        if (!nvgpu_list_empty(&slab->partial)) {
+                slab_page = nvgpu_list_first_entry(&slab->partial,
+                                             page_alloc_slab_page,
                                              list_entry);
                 del_slab_page_from_partial(slab, slab_page);
-        } else if (!list_empty(&slab->empty)) {
-                slab_page = list_first_entry(&slab->empty,
-                                             struct page_alloc_slab_page,
+        } else if (!nvgpu_list_empty(&slab->empty)) {
+                slab_page = nvgpu_list_first_entry(&slab->empty,
+                                             page_alloc_slab_page,
                                              list_entry);
                 del_slab_page_from_empty(slab, slab_page);
         }
@@ -383,7 +383,7 @@ static void __nvgpu_free_slab(struct nvgpu_page_allocator *a,
 
         /* And add. */
         if (new_state == SP_EMPTY) {
-                if (list_empty(&slab->empty))
+                if (nvgpu_list_empty(&slab->empty))
                         add_slab_page_to_empty(slab, slab_page);
                 else
                         free_slab_page(a, slab_page);
@@ -835,9 +835,9 @@ static int nvgpu_page_alloc_init_slabs(struct nvgpu_page_allocator *a)
                 struct page_alloc_slab *slab = &a->slabs[i];
 
                 slab->slab_size = SZ_4K * (1 << i);
-                INIT_LIST_HEAD(&slab->empty);
-                INIT_LIST_HEAD(&slab->partial);
-                INIT_LIST_HEAD(&slab->full);
+                nvgpu_init_list_node(&slab->empty);
+                nvgpu_init_list_node(&slab->partial);
+                nvgpu_init_list_node(&slab->full);
                 slab->nr_empty = 0;
                 slab->nr_partial = 0;
                 slab->nr_full = 0;