Diffstat (limited to 'drivers/gpu/nvgpu/gk20a/pmu_gk20a.c')
-rw-r--r--  drivers/gpu/nvgpu/gk20a/pmu_gk20a.c  44
1 files changed, 25 insertions, 19 deletions
diff --git a/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c b/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c
index 47ee7a1b..5d973938 100644
--- a/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c
@@ -2603,7 +2603,8 @@ static int pmu_init_perfmon(struct pmu_gk20a *pmu)
 
 	if (!pmu->sample_buffer)
 		err = pmu->dmem.alloc(&pmu->dmem,
-			&pmu->sample_buffer, 2 * sizeof(u16));
+			&pmu->sample_buffer, 2 * sizeof(u16),
+			PMU_DMEM_ALLOC_ALIGNMENT);
 	if (err) {
 		gk20a_err(dev_from_gk20a(g),
 			"failed to allocate perfmon sample buffer");
@@ -2707,8 +2708,7 @@ static int pmu_process_init_msg(struct pmu_gk20a *pmu,
 	if (!pmu->dmem.alloc)
 		gk20a_allocator_init(&pmu->dmem, "gk20a_pmu_dmem",
 			pv->get_pmu_init_msg_pmu_sw_mg_off(init),
-			pv->get_pmu_init_msg_pmu_sw_mg_size(init),
-			PMU_DMEM_ALLOC_ALIGNMENT);
+			pv->get_pmu_init_msg_pmu_sw_mg_size(init));
 
 	pmu->pmu_ready = true;
 	pmu->pmu_state = PMU_STATE_INIT_RECEIVED;
@@ -2845,17 +2845,19 @@ static int pmu_response_handle(struct pmu_gk20a *pmu,
 	if (pv->pmu_allocation_get_dmem_size(pmu,
 			pv->get_pmu_seq_in_a_ptr(seq)) != 0)
 		pmu->dmem.free(&pmu->dmem,
 			pv->pmu_allocation_get_dmem_offset(pmu,
 				pv->get_pmu_seq_in_a_ptr(seq)),
 			pv->pmu_allocation_get_dmem_size(pmu,
-				pv->get_pmu_seq_in_a_ptr(seq)));
+				pv->get_pmu_seq_in_a_ptr(seq)),
+			PMU_DMEM_ALLOC_ALIGNMENT);
 	if (pv->pmu_allocation_get_dmem_size(pmu,
 			pv->get_pmu_seq_out_a_ptr(seq)) != 0)
 		pmu->dmem.free(&pmu->dmem,
 			pv->pmu_allocation_get_dmem_offset(pmu,
 				pv->get_pmu_seq_out_a_ptr(seq)),
 			pv->pmu_allocation_get_dmem_size(pmu,
-				pv->get_pmu_seq_out_a_ptr(seq)));
+				pv->get_pmu_seq_out_a_ptr(seq)),
+			PMU_DMEM_ALLOC_ALIGNMENT);
 
 	if (seq->callback)
 		seq->callback(g, msg, seq->cb_params, seq->desc, ret);
@@ -3493,8 +3495,9 @@ int gk20a_pmu_cmd_post(struct gk20a *g, struct pmu_cmd *cmd,
 			(u16)max(payload->in.size, payload->out.size));
 
 		err = pmu->dmem.alloc(&pmu->dmem,
 			pv->pmu_allocation_get_dmem_offset_addr(pmu, in),
-			pv->pmu_allocation_get_dmem_size(pmu, in));
+			pv->pmu_allocation_get_dmem_size(pmu, in),
+			PMU_DMEM_ALLOC_ALIGNMENT);
 		if (err)
 			goto clean_up;
 
@@ -3517,8 +3520,9 @@ int gk20a_pmu_cmd_post(struct gk20a *g, struct pmu_cmd *cmd,
 
 		if (payload->out.buf != payload->in.buf) {
 			err = pmu->dmem.alloc(&pmu->dmem,
 				pv->pmu_allocation_get_dmem_offset_addr(pmu, out),
-				pv->pmu_allocation_get_dmem_size(pmu, out));
+				pv->pmu_allocation_get_dmem_size(pmu, out),
+				PMU_DMEM_ALLOC_ALIGNMENT);
 			if (err)
 				goto clean_up;
 		} else {
@@ -3548,12 +3552,14 @@ clean_up:
 	gk20a_dbg_fn("fail");
 	if (in)
 		pmu->dmem.free(&pmu->dmem,
 			pv->pmu_allocation_get_dmem_offset(pmu, in),
-			pv->pmu_allocation_get_dmem_size(pmu, in));
+			pv->pmu_allocation_get_dmem_size(pmu, in),
+			PMU_DMEM_ALLOC_ALIGNMENT);
 	if (out)
 		pmu->dmem.free(&pmu->dmem,
 			pv->pmu_allocation_get_dmem_offset(pmu, out),
-			pv->pmu_allocation_get_dmem_size(pmu, out));
+			pv->pmu_allocation_get_dmem_size(pmu, out),
+			PMU_DMEM_ALLOC_ALIGNMENT);
 
 	pmu_seq_release(pmu, seq);
 	return err;
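
The hunks above all make the same change: the DMEM alignment moves out of the one-time gk20a_allocator_init() call and becomes a trailing PMU_DMEM_ALLOC_ALIGNMENT argument on every pmu->dmem.alloc() and pmu->dmem.free() call. The following is a minimal, self-contained sketch of that calling convention only; the demo_allocator struct, the bump-allocator body, and the alignment value 4 are illustrative assumptions, not code or values taken from this driver.

/*
 * Illustrative model of the per-call alignment convention shown in the
 * diff above. Everything named demo_* is hypothetical; the real allocator
 * lives in gk20a_allocator.c and PMU_DMEM_ALLOC_ALIGNMENT is defined in
 * the driver headers.
 */
#include <stdio.h>
#include <stdint.h>

#define PMU_DMEM_ALLOC_ALIGNMENT 4	/* placeholder value for illustration */

struct demo_allocator {
	const char *name;
	uint32_t base;		/* start of the managed DMEM region */
	uint32_t size;		/* size of the managed region */
	uint32_t cursor;	/* next free offset (bump allocation) */

	/* Alignment is passed per call, as in the patched alloc()/free(). */
	int (*alloc)(struct demo_allocator *a, uint32_t *offset,
		     uint32_t len, uint32_t align);
	void (*free)(struct demo_allocator *a, uint32_t offset,
		     uint32_t len, uint32_t align);
};

static int demo_alloc(struct demo_allocator *a, uint32_t *offset,
		      uint32_t len, uint32_t align)
{
	uint32_t start = (a->cursor + align - 1) & ~(align - 1);

	if (start + len > a->base + a->size)
		return -1;		/* out of DMEM */
	*offset = start;
	a->cursor = start + len;
	return 0;
}

static void demo_free(struct demo_allocator *a, uint32_t offset,
		      uint32_t len, uint32_t align)
{
	/* A real allocator returns the block to a free list; this bump
	 * model only rewinds when the freed block was the last one. */
	if (offset + len == a->cursor)
		a->cursor = offset;
	(void)align;
}

/* After the patch, init no longer takes an alignment argument. */
static void demo_allocator_init(struct demo_allocator *a, const char *name,
				uint32_t base, uint32_t size)
{
	a->name = name;
	a->base = base;
	a->size = size;
	a->cursor = base;
	a->alloc = demo_alloc;
	a->free = demo_free;
}

int main(void)
{
	struct demo_allocator dmem;
	uint32_t sample_buffer = 0;

	demo_allocator_init(&dmem, "demo_pmu_dmem", 0x100, 0x400);

	/* Mirrors: pmu->dmem.alloc(&pmu->dmem, &pmu->sample_buffer,
	 *          2 * sizeof(u16), PMU_DMEM_ALLOC_ALIGNMENT); */
	if (!dmem.alloc(&dmem, &sample_buffer, 2 * sizeof(uint16_t),
			PMU_DMEM_ALLOC_ALIGNMENT))
		printf("sample buffer at DMEM offset 0x%x\n",
		       (unsigned)sample_buffer);

	dmem.free(&dmem, sample_buffer, 2 * sizeof(uint16_t),
		  PMU_DMEM_ALLOC_ALIGNMENT);
	return 0;
}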