path: root/drivers/gpu/nvgpu/gp106/pmu_gp106.c
author	Deepak Goyal <dgoyal@nvidia.com>	2018-02-28 06:09:57 -0500
committer	mobile promotions <svcmobile_promotions@nvidia.com>	2018-03-06 00:18:20 -0500
commit	26b91946031a88293c7ce563ff923802af6509ce (patch)
tree	9b7ad56e7feb381b903f0c07a988114cdec0302f /drivers/gpu/nvgpu/gp106/pmu_gp106.c
parent	848af2ce6de6140323a6ffe3075bf8021e119434 (diff)
gpu: nvgpu: gv11b: Correct PMU PG enabled masks.
PMU ucode records the supported feature list for a particular chip as a support mask sent via PMU_PG_PARAM_CMD_GR_INIT_PARAM. It then enables a selected subset of those features through an enable mask sent via the PMU_PG_PARAM_CMD_SUB_FEATURE_MASK_UPDATE cmd.

Until now only the ELPG state machine mask was enabled, so only the ELPG state machine was executed while other crucial steps in the ELPG entry/exit sequence were skipped.

Bug 200392620.
Bug 200296076.

Change-Id: I5e1800980990c146c731537290cb7d4c07e937c3
Signed-off-by: Deepak Goyal <dgoyal@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/1665767
Reviewed-by: svc-mobile-coverity <svc-mobile-coverity@nvidia.com>
GVS: Gerrit_Virtual_Submit
Reviewed-by: Seshendra Gadagottu <sgadagottu@nvidia.com>
Tested-by: Seshendra Gadagottu <sgadagottu@nvidia.com>
Reviewed-by: Vijayakumar Subbu <vsubbu@nvidia.com>
Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com>
Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
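For context: the featuremask sent with PMU_PG_PARAM_CMD_GR_INIT_PARAM advertises which PG features the driver supports for a given PG engine, and a per-chip pg_feature_list hook supplies that mask; the enable mask sent with PMU_PG_PARAM_CMD_SUB_FEATURE_MASK_UPDATE then selects which of those features actually run. The standalone sketch below only illustrates that relationship; it is not the nvgpu implementation, and its bit values and *_FEATURE_MASK_* names are hypothetical stand-ins for the real NVGPU_PMU_*_FEATURE_MASK_* macros. The gp106 hunks in this file merely switch to the renamed RPPG mask macro; the gv11b change described above is what corrects which masks get enabled.

/*
 * Standalone sketch (not the nvgpu implementation): how a per-chip
 * pg_feature_list() hook can report a support mask for
 * PMU_PG_PARAM_CMD_GR_INIT_PARAM, and how an enable mask for
 * PMU_PG_PARAM_CMD_SUB_FEATURE_MASK_UPDATE can be derived from it.
 * All constants below are hypothetical stand-ins.
 */
#include <stdint.h>
#include <stdio.h>

#define PG_ENGINE_ID_GRAPHICS	0u
#define PG_ENGINE_ID_MS		4u

/* Hypothetical feature bits for the GR PG engine. */
#define GR_FEATURE_MASK_ELPG_SM	(1u << 0)	/* ELPG state machine only */
#define GR_FEATURE_MASK_RPPG	(1u << 1)
#define GR_FEATURE_MASK_ALL	(GR_FEATURE_MASK_ELPG_SM | GR_FEATURE_MASK_RPPG)

#define MS_FEATURE_MASK_ALL	0xFu		/* hypothetical */

/* Per-chip hook: which PG features this chip supports for an engine. */
static uint32_t pg_feature_list(uint32_t pg_engine_id)
{
	if (pg_engine_id == PG_ENGINE_ID_GRAPHICS)
		return GR_FEATURE_MASK_ALL;	/* all supported GR features */
	if (pg_engine_id == PG_ENGINE_ID_MS)
		return MS_FEATURE_MASK_ALL;
	return 0;
}

int main(void)
{
	/* Support mask advertised via PMU_PG_PARAM_CMD_GR_INIT_PARAM. */
	uint32_t support_mask = pg_feature_list(PG_ENGINE_ID_GRAPHICS);

	/*
	 * Enable mask later sent via PMU_PG_PARAM_CMD_SUB_FEATURE_MASK_UPDATE.
	 * The bug described above amounts to enabling only the ELPG state
	 * machine bit here instead of every supported feature.
	 */
	uint32_t enable_mask = support_mask;

	printf("support mask 0x%x, enable mask 0x%x\n",
	       (unsigned)support_mask, (unsigned)enable_mask);
	return 0;
}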
Diffstat (limited to 'drivers/gpu/nvgpu/gp106/pmu_gp106.c')
-rw-r--r--	drivers/gpu/nvgpu/gp106/pmu_gp106.c	4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/drivers/gpu/nvgpu/gp106/pmu_gp106.c b/drivers/gpu/nvgpu/gp106/pmu_gp106.c
index de26ecf2..d4041905 100644
--- a/drivers/gpu/nvgpu/gp106/pmu_gp106.c
+++ b/drivers/gpu/nvgpu/gp106/pmu_gp106.c
@@ -81,7 +81,7 @@ int gp106_pmu_engine_reset(struct gk20a *g, bool do_reset)
 u32 gp106_pmu_pg_feature_list(struct gk20a *g, u32 pg_engine_id)
 {
 	if (pg_engine_id == PMU_PG_ELPG_ENGINE_ID_GRAPHICS)
-		return PMU_PG_FEATURE_GR_RPPG_ENABLED;
+		return NVGPU_PMU_GR_FEATURE_MASK_RPPG;
 
 	if (pg_engine_id == PMU_PG_ELPG_ENGINE_ID_MS)
 		return NVGPU_PMU_MS_FEATURE_MASK_ALL;
@@ -133,7 +133,7 @@ int gp106_pg_param_init(struct gk20a *g, u32 pg_engine_id)
 	cmd.cmd.pg.gr_init_param.sub_cmd_id =
 		PMU_PG_PARAM_CMD_GR_INIT_PARAM;
 	cmd.cmd.pg.gr_init_param.featuremask =
-		PMU_PG_FEATURE_GR_RPPG_ENABLED;
+		NVGPU_PMU_GR_FEATURE_MASK_RPPG;
 
 	gp106_dbg_pmu("cmd post GR PMU_PG_CMD_ID_PG_PARAM");
 	nvgpu_pmu_cmd_post(g, &cmd, NULL, NULL, PMU_COMMAND_QUEUE_HPQ,