diff options
Diffstat (limited to 'drivers/gpu/nvgpu/common/mm/pd_cache.c')
-rw-r--r--	drivers/gpu/nvgpu/common/mm/pd_cache.c | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/drivers/gpu/nvgpu/common/mm/pd_cache.c b/drivers/gpu/nvgpu/common/mm/pd_cache.c
index dae6d34e..a8ed10e7 100644
--- a/drivers/gpu/nvgpu/common/mm/pd_cache.c
+++ b/drivers/gpu/nvgpu/common/mm/pd_cache.c
@@ -102,7 +102,7 @@ int nvgpu_pd_cache_init(struct gk20a *g)
 	}
 
 	cache = nvgpu_kzalloc(g, sizeof(*cache));
-	if (!cache) {
+	if (cache == NULL) {
 		nvgpu_err(g, "Failed to alloc pd_cache!");
 		return -ENOMEM;
 	}
@@ -132,7 +132,7 @@ void nvgpu_pd_cache_fini(struct gk20a *g)
 	u32 i;
 	struct nvgpu_pd_cache *cache = g->mm.pd_cache;
 
-	if (!cache) {
+	if (cache == NULL) {
 		return;
 	}
 
@@ -159,7 +159,7 @@ int nvgpu_pd_cache_alloc_direct(struct gk20a *g,
 	pd_dbg(g, "PD-Alloc [D] %u bytes", bytes);
 
 	pd->mem = nvgpu_kzalloc(g, sizeof(*pd->mem));
-	if (!pd->mem) {
+	if (pd->mem == NULL) {
 		nvgpu_err(g, "OOM allocating nvgpu_mem struct!");
 		return -ENOMEM;
 	}
@@ -205,7 +205,7 @@ static int nvgpu_pd_cache_alloc_new(struct gk20a *g,
 	pd_dbg(g, "PD-Alloc [C] New: offs=0");
 
 	pentry = nvgpu_kzalloc(g, sizeof(*pentry));
-	if (!pentry) {
+	if (pentry == NULL) {
 		nvgpu_err(g, "OOM allocating pentry!");
 		return -ENOMEM;
 	}
@@ -313,7 +313,7 @@ static int nvgpu_pd_cache_alloc(struct gk20a *g, struct nvgpu_pd_cache *cache,
 
 	pd_dbg(g, "PD-Alloc [C] %u bytes", bytes);
 
-	if (bytes & (bytes - 1U) ||
+	if ((bytes & (bytes - 1U)) != 0U ||
 	    (bytes >= PAGE_SIZE ||
 	     bytes < NVGPU_PD_CACHE_MIN)) {
 		pd_dbg(g, "PD-Alloc [C] Invalid (bytes=%u)!", bytes);
@@ -321,7 +321,7 @@ static int nvgpu_pd_cache_alloc(struct gk20a *g, struct nvgpu_pd_cache *cache,
 	}
 
 	pentry = nvgpu_pd_cache_get_partial(cache, bytes);
-	if (!pentry) {
+	if (pentry == NULL) {
 		err = nvgpu_pd_cache_alloc_new(g, cache, pd, bytes);
 	} else {
 		err = nvgpu_pd_cache_alloc_from_partial(g, cache, pentry, pd);
@@ -357,7 +357,7 @@ int nvgpu_pd_alloc(struct vm_gk20a *vm, struct nvgpu_gmmu_pd *pd, u32 bytes)
 		return 0;
 	}
 
-	if (WARN_ON(!g->mm.pd_cache)) {
+	if (WARN_ON(g->mm.pd_cache == NULL)) {
 		return -ENOMEM;
 	}
 
@@ -372,7 +372,7 @@ void nvgpu_pd_cache_free_direct(struct gk20a *g, struct nvgpu_gmmu_pd *pd)
 {
 	pd_dbg(g, "PD-Free [D] 0x%p", pd->mem);
 
-	if (!pd->mem) {
+	if (pd->mem == NULL) {
 		return;
 	}
 
@@ -425,7 +425,7 @@ static struct nvgpu_pd_mem_entry *nvgpu_pd_cache_look_up(
 
 	nvgpu_rbtree_search((u64)(uintptr_t)pd->mem, &node,
 			    cache->mem_tree);
-	if (!node) {
+	if (node == NULL) {
 		return NULL;
 	}
 
@@ -440,7 +440,7 @@ static void nvgpu_pd_cache_free(struct gk20a *g, struct nvgpu_pd_cache *cache,
 	pd_dbg(g, "PD-Free [C] 0x%p", pd->mem);
 
 	pentry = nvgpu_pd_cache_look_up(g, cache, pd);
-	if (!pentry) {
+	if (pentry == NULL) {
 		WARN(1, "Attempting to free non-existent pd");
 		return;
 	}