Diffstat (limited to 'drivers/gpu/nvgpu/common/pmu/pmu_ipc.c')
-rw-r--r--  drivers/gpu/nvgpu/common/pmu/pmu_ipc.c | 26
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/drivers/gpu/nvgpu/common/pmu/pmu_ipc.c b/drivers/gpu/nvgpu/common/pmu/pmu_ipc.c
index 39be07cc..68654a70 100644
--- a/drivers/gpu/nvgpu/common/pmu/pmu_ipc.c
+++ b/drivers/gpu/nvgpu/common/pmu/pmu_ipc.c
@@ -184,9 +184,9 @@ static bool pmu_validate_cmd(struct nvgpu_pmu *pmu, struct pmu_cmd *cmd,
                 goto invalid_cmd;
         }
 
-        if ((payload->in.buf != NULL && payload->in.size == 0) ||
-            (payload->out.buf != NULL && payload->out.size == 0) ||
-            (payload->rpc.prpc != NULL && payload->rpc.size_rpc == 0)) {
+        if ((payload->in.buf != NULL && payload->in.size == 0U) ||
+            (payload->out.buf != NULL && payload->out.size == 0U) ||
+            (payload->rpc.prpc != NULL && payload->rpc.size_rpc == 0U)) {
                 goto invalid_cmd;
         }
 
@@ -207,8 +207,8 @@ static bool pmu_validate_cmd(struct nvgpu_pmu *pmu, struct pmu_cmd *cmd,
         }
 
 
-        if ((payload->in.offset != 0 && payload->in.buf == NULL) ||
-            (payload->out.offset != 0 && payload->out.buf == NULL)) {
+        if ((payload->in.offset != 0U && payload->in.buf == NULL) ||
+            (payload->out.offset != 0U && payload->out.buf == NULL)) {
                 goto invalid_cmd;
         }
 
@@ -316,7 +316,7 @@ static int pmu_cmd_payload_extract(struct gk20a *g, struct pmu_cmd *cmd,
                 seq->out_payload = payload->out.buf;
         }
 
-        if (payload && payload->in.offset != 0) {
+        if (payload && payload->in.offset != 0U) {
                 pv->set_pmu_allocation_ptr(pmu, &in,
                         ((u8 *)&cmd->cmd + payload->in.offset));
 
@@ -335,7 +335,7 @@ static int pmu_cmd_payload_extract(struct gk20a *g, struct pmu_cmd *cmd,
                         goto clean_up;
                 }
 
-                if (payload->in.fb_size != 0x0) {
+                if (payload->in.fb_size != 0x0U) {
                         seq->in_mem = nvgpu_kzalloc(g,
                                 sizeof(struct nvgpu_mem));
                         if (!seq->in_mem) {
@@ -365,7 +365,7 @@ static int pmu_cmd_payload_extract(struct gk20a *g, struct pmu_cmd *cmd,
                         pv->pmu_allocation_get_dmem_offset(pmu, in));
         }
 
-        if (payload && payload->out.offset != 0) {
+        if (payload && payload->out.offset != 0U) {
                 pv->set_pmu_allocation_ptr(pmu, &out,
                         ((u8 *)&cmd->cmd + payload->out.offset));
                 pv->pmu_allocation_set_dmem_size(pmu, out,
@@ -381,7 +381,7 @@ static int pmu_cmd_payload_extract(struct gk20a *g, struct pmu_cmd *cmd,
                         goto clean_up;
                 }
 
-                if (payload->out.fb_size != 0x0) {
+                if (payload->out.fb_size != 0x0U) {
                         seq->out_mem = nvgpu_kzalloc(g,
                                 sizeof(struct nvgpu_mem));
                         if (!seq->out_mem) {
@@ -534,7 +534,7 @@ static int pmu_response_handle(struct nvgpu_pmu *pmu,
                 }
         }
         if (pv->pmu_allocation_get_dmem_size(pmu,
-                pv->get_pmu_seq_out_a_ptr(seq)) != 0) {
+                pv->get_pmu_seq_out_a_ptr(seq)) != 0U) {
                 nvgpu_flcn_copy_from_dmem(pmu->flcn,
                         pv->pmu_allocation_get_dmem_offset(pmu,
                         pv->get_pmu_seq_out_a_ptr(seq)),
@@ -546,13 +546,13 @@ static int pmu_response_handle(struct nvgpu_pmu *pmu,
                 seq->callback = NULL;
         }
         if (pv->pmu_allocation_get_dmem_size(pmu,
-                pv->get_pmu_seq_in_a_ptr(seq)) != 0) {
+                pv->get_pmu_seq_in_a_ptr(seq)) != 0U) {
                 nvgpu_free(&pmu->dmem,
                         pv->pmu_allocation_get_dmem_offset(pmu,
                         pv->get_pmu_seq_in_a_ptr(seq)));
         }
         if (pv->pmu_allocation_get_dmem_size(pmu,
-                pv->get_pmu_seq_out_a_ptr(seq)) != 0) {
+                pv->get_pmu_seq_out_a_ptr(seq)) != 0U) {
                 nvgpu_free(&pmu->dmem,
                         pv->pmu_allocation_get_dmem_offset(pmu,
                         pv->get_pmu_seq_out_a_ptr(seq)));
@@ -748,7 +748,7 @@ int pmu_wait_message_cond(struct nvgpu_pmu *pmu, u32 timeout_ms,
                         gk20a_pmu_isr(g);
                 }
 
-                nvgpu_usleep_range(delay, delay * 2);
+                nvgpu_usleep_range(delay, delay * 2U);
                 delay = min_t(u32, delay << 1, GR_IDLE_CHECK_MAX);
         } while (!nvgpu_timeout_expired(&timeout));
 
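
The whole change above follows one pattern: integer literals that are compared with, or multiplied into, unsigned values gain a 'U' suffix, so 0, 0x0, and 2 become 0U, 0x0U, and 2U. Runtime behaviour is unchanged; the suffix only makes the literal's type match the unsigned operand, avoiding an implicit int-to-unsigned conversion in the expression. Changes of this shape are usually made to satisfy strict signedness rules (MISRA-style essential-type checks), but the commit message is not shown here, so that reading is an inference. Below is a minimal, self-contained sketch of the idiom; struct demo_buf and demo_valid() are hypothetical names used only for illustration, not the real nvgpu pmu_payload layout or API.

/*
 * Editor's sketch only -- not part of the diff above. The struct and
 * function names are hypothetical; they exist so the idiom compiles
 * on its own.
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct demo_buf {
        void *buf;
        unsigned int size;      /* unsigned, like the payload sizes in pmu_ipc.c */
        unsigned int offset;
};

static bool demo_valid(const struct demo_buf *p)
{
        /* Before the change: p->size == 0  (int literal vs. unsigned field). */
        /* After the change:  p->size == 0U (both operands unsigned).         */
        if (p->buf != NULL && p->size == 0U)
                return false;

        /* Same shape as the offset/buf consistency check in pmu_validate_cmd(). */
        if (p->offset != 0U && p->buf == NULL)
                return false;

        return true;
}

int main(void)
{
        unsigned int word = 0U;
        struct demo_buf ok  = { .buf = &word, .size = sizeof(word), .offset = 0U };
        struct demo_buf bad = { .buf = &word, .size = 0U, .offset = 0U };

        printf("ok: %d, bad: %d\n", demo_valid(&ok), demo_valid(&bad));
        return 0;
}

The same reasoning applies to nvgpu_usleep_range(delay, delay * 2U) in the last hunk: delay is treated as a u32 (see the min_t(u32, ...) on the following line), so writing the multiplier as 2U keeps the whole argument expression unsigned.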