summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
author	Deepak Goyal <dgoyal@nvidia.com>	2018-02-28 06:09:57 -0500
committer	mobile promotions <svcmobile_promotions@nvidia.com>	2018-03-06 00:18:20 -0500
commit	26b91946031a88293c7ce563ff923802af6509ce (patch)
tree	9b7ad56e7feb381b903f0c07a988114cdec0302f
parent	848af2ce6de6140323a6ffe3075bf8021e119434 (diff)
gpu: nvgpu: gv11b: Correct PMU PG enabled masks.
PMU ucode records the supported feature list for a particular chip as a
support mask sent via PMU_PG_PARAM_CMD_GR_INIT_PARAM. It then enables a
selective feature list through the enable mask sent via the
PMU_PG_PARAM_CMD_SUB_FEATURE_MASK_UPDATE cmd.

Right now only the ELPG state machine mask was enabled, so only the ELPG
state machine was getting executed, but other crucial steps in the ELPG
entry/exit sequence were getting skipped.

Bug 200392620.
Bug 200296076.

Change-Id: I5e1800980990c146c731537290cb7d4c07e937c3
Signed-off-by: Deepak Goyal <dgoyal@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/1665767
Reviewed-by: svc-mobile-coverity <svc-mobile-coverity@nvidia.com>
GVS: Gerrit_Virtual_Submit
Reviewed-by: Seshendra Gadagottu <sgadagottu@nvidia.com>
Tested-by: Seshendra Gadagottu <sgadagottu@nvidia.com>
Reviewed-by: Vijayakumar Subbu <vsubbu@nvidia.com>
Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com>
Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
-rw-r--r--	drivers/gpu/nvgpu/common/linux/debug_pmu.c	2
-rw-r--r--	drivers/gpu/nvgpu/common/linux/sysfs.c	2
-rw-r--r--	drivers/gpu/nvgpu/common/pmu/pmu_pg.c	6
-rw-r--r--	drivers/gpu/nvgpu/gk20a/pmu_gk20a.c	2
-rw-r--r--	drivers/gpu/nvgpu/gp106/pmu_gp106.c	4
-rw-r--r--	drivers/gpu/nvgpu/gp10b/pmu_gp10b.c	2
-rw-r--r--	drivers/gpu/nvgpu/gv11b/pmu_gv11b.c	12
-rw-r--r--	drivers/gpu/nvgpu/include/nvgpu/pmuif/gpmuif_pg.h	27
8 files changed, 41 insertions, 16 deletions
diff --git a/drivers/gpu/nvgpu/common/linux/debug_pmu.c b/drivers/gpu/nvgpu/common/linux/debug_pmu.c
index a8a8870e..f4ed992d 100644
--- a/drivers/gpu/nvgpu/common/linux/debug_pmu.c
+++ b/drivers/gpu/nvgpu/common/linux/debug_pmu.c
@@ -27,7 +27,7 @@ static int lpwr_debug_show(struct seq_file *s, void *data)
 	if (g->ops.pmu.pmu_pg_engines_feature_list &&
 		g->ops.pmu.pmu_pg_engines_feature_list(g,
 			PMU_PG_ELPG_ENGINE_ID_GRAPHICS) !=
-			PMU_PG_FEATURE_GR_POWER_GATING_ENABLED) {
+			NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING) {
 		seq_printf(s, "PSTATE: %u\n"
 			"RPPG Enabled: %u\n"
 			"RPPG ref count: %u\n"
diff --git a/drivers/gpu/nvgpu/common/linux/sysfs.c b/drivers/gpu/nvgpu/common/linux/sysfs.c
index 86f1877d..afa08fc4 100644
--- a/drivers/gpu/nvgpu/common/linux/sysfs.c
+++ b/drivers/gpu/nvgpu/common/linux/sysfs.c
@@ -477,7 +477,7 @@ static ssize_t elpg_enable_store(struct device *dev,
 	if (g->ops.pmu.pmu_pg_engines_feature_list &&
 		g->ops.pmu.pmu_pg_engines_feature_list(g,
 			PMU_PG_ELPG_ENGINE_ID_GRAPHICS) !=
-			PMU_PG_FEATURE_GR_POWER_GATING_ENABLED) {
+			NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING) {
 		nvgpu_pmu_pg_global_enable(g, false);
 		g->elpg_enabled = false;
 	} else {
diff --git a/drivers/gpu/nvgpu/common/pmu/pmu_pg.c b/drivers/gpu/nvgpu/common/pmu/pmu_pg.c
index bf39ce19..2d0fc499 100644
--- a/drivers/gpu/nvgpu/common/pmu/pmu_pg.c
+++ b/drivers/gpu/nvgpu/common/pmu/pmu_pg.c
@@ -89,7 +89,7 @@ static void pmu_handle_pg_elpg_msg(struct gk20a *g, struct pmu_msg *msg,
 	if (g->ops.pmu.pmu_pg_engines_feature_list &&
 		g->ops.pmu.pmu_pg_engines_feature_list(g,
 			PMU_PG_ELPG_ENGINE_ID_GRAPHICS) !=
-			PMU_PG_FEATURE_GR_POWER_GATING_ENABLED) {
+			NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING) {
 		pmu->initialized = true;
 		nvgpu_pmu_state_change(g, PMU_STATE_STARTED,
 			true);
@@ -116,7 +116,7 @@ int nvgpu_pmu_pg_global_enable(struct gk20a *g, u32 enable_pg)
 		if (g->ops.pmu.pmu_pg_engines_feature_list &&
 			g->ops.pmu.pmu_pg_engines_feature_list(g,
 				PMU_PG_ELPG_ENGINE_ID_GRAPHICS) !=
-				PMU_PG_FEATURE_GR_POWER_GATING_ENABLED) {
+				NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING) {
 			if (g->ops.pmu.pmu_lpwr_enable_pg)
 				status = g->ops.pmu.pmu_lpwr_enable_pg(g,
 					true);
@@ -126,7 +126,7 @@ int nvgpu_pmu_pg_global_enable(struct gk20a *g, u32 enable_pg)
 		if (g->ops.pmu.pmu_pg_engines_feature_list &&
 			g->ops.pmu.pmu_pg_engines_feature_list(g,
 				PMU_PG_ELPG_ENGINE_ID_GRAPHICS) !=
-				PMU_PG_FEATURE_GR_POWER_GATING_ENABLED) {
+				NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING) {
 			if (g->ops.pmu.pmu_lpwr_disable_pg)
 				status = g->ops.pmu.pmu_lpwr_disable_pg(g,
 					true);
diff --git a/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c b/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c
index 603d25fe..0531b387 100644
--- a/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/pmu_gk20a.c
@@ -545,7 +545,7 @@ u32 gk20a_pmu_pg_engines_list(struct gk20a *g)
 u32 gk20a_pmu_pg_feature_list(struct gk20a *g, u32 pg_engine_id)
 {
 	if (pg_engine_id == PMU_PG_ELPG_ENGINE_ID_GRAPHICS)
-		return PMU_PG_FEATURE_GR_POWER_GATING_ENABLED;
+		return NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING;
 
 	return 0;
 }
diff --git a/drivers/gpu/nvgpu/gp106/pmu_gp106.c b/drivers/gpu/nvgpu/gp106/pmu_gp106.c
index de26ecf2..d4041905 100644
--- a/drivers/gpu/nvgpu/gp106/pmu_gp106.c
+++ b/drivers/gpu/nvgpu/gp106/pmu_gp106.c
@@ -81,7 +81,7 @@ int gp106_pmu_engine_reset(struct gk20a *g, bool do_reset)
 u32 gp106_pmu_pg_feature_list(struct gk20a *g, u32 pg_engine_id)
 {
 	if (pg_engine_id == PMU_PG_ELPG_ENGINE_ID_GRAPHICS)
-		return PMU_PG_FEATURE_GR_RPPG_ENABLED;
+		return NVGPU_PMU_GR_FEATURE_MASK_RPPG;
 
 	if (pg_engine_id == PMU_PG_ELPG_ENGINE_ID_MS)
 		return NVGPU_PMU_MS_FEATURE_MASK_ALL;
@@ -133,7 +133,7 @@ int gp106_pg_param_init(struct gk20a *g, u32 pg_engine_id)
 		cmd.cmd.pg.gr_init_param.sub_cmd_id =
 			PMU_PG_PARAM_CMD_GR_INIT_PARAM;
 		cmd.cmd.pg.gr_init_param.featuremask =
-			PMU_PG_FEATURE_GR_RPPG_ENABLED;
+			NVGPU_PMU_GR_FEATURE_MASK_RPPG;
 
 		gp106_dbg_pmu("cmd post GR PMU_PG_CMD_ID_PG_PARAM");
 		nvgpu_pmu_cmd_post(g, &cmd, NULL, NULL, PMU_COMMAND_QUEUE_HPQ,
diff --git a/drivers/gpu/nvgpu/gp10b/pmu_gp10b.c b/drivers/gpu/nvgpu/gp10b/pmu_gp10b.c
index d368bad7..49ad3920 100644
--- a/drivers/gpu/nvgpu/gp10b/pmu_gp10b.c
+++ b/drivers/gpu/nvgpu/gp10b/pmu_gp10b.c
@@ -239,7 +239,7 @@ int gp10b_pg_gr_init(struct gk20a *g, u32 pg_engine_id)
 		cmd.cmd.pg.gr_init_param.sub_cmd_id =
 			PMU_PG_PARAM_CMD_GR_INIT_PARAM;
 		cmd.cmd.pg.gr_init_param.featuremask =
-			PMU_PG_FEATURE_GR_POWER_GATING_ENABLED;
+			NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING;
 
 		gp10b_dbg_pmu("cmd post PMU_PG_CMD_ID_PG_PARAM ");
 		nvgpu_pmu_cmd_post(g, &cmd, NULL, NULL, PMU_COMMAND_QUEUE_HPQ,
diff --git a/drivers/gpu/nvgpu/gv11b/pmu_gv11b.c b/drivers/gpu/nvgpu/gv11b/pmu_gv11b.c
index 7dd4f8f4..32e751d9 100644
--- a/drivers/gpu/nvgpu/gv11b/pmu_gv11b.c
+++ b/drivers/gpu/nvgpu/gv11b/pmu_gv11b.c
@@ -446,7 +446,7 @@ int gv11b_pg_gr_init(struct gk20a *g, u32 pg_engine_id)
 		cmd.cmd.pg.gr_init_param_v1.sub_cmd_id =
 			PMU_PG_PARAM_CMD_GR_INIT_PARAM;
 		cmd.cmd.pg.gr_init_param_v1.featuremask =
-			PMU_PG_FEATURE_GR_POWER_GATING_ENABLED;
+			NVGPU_PMU_GR_FEATURE_MASK_ALL;
 
 		gv11b_dbg_pmu("cmd post PMU_PG_CMD_ID_PG_PARAM_INIT\n");
 		nvgpu_pmu_cmd_post(g, &cmd, NULL, NULL, PMU_COMMAND_QUEUE_HPQ,
@@ -476,7 +476,15 @@ int gv11b_pg_set_subfeature_mask(struct gk20a *g, u32 pg_engine_id)
 		cmd.cmd.pg.sf_mask_update.ctrl_id =
 			PMU_PG_ELPG_ENGINE_ID_GRAPHICS;
 		cmd.cmd.pg.sf_mask_update.enabled_mask =
-			PMU_PG_FEATURE_GR_POWER_GATING_ENABLED;
+			NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING |
+			NVGPU_PMU_GR_FEATURE_MASK_PRIV_RING |
+			NVGPU_PMU_GR_FEATURE_MASK_UNBIND |
+			NVGPU_PMU_GR_FEATURE_MASK_SAVE_GLOBAL_STATE |
+			NVGPU_PMU_GR_FEATURE_MASK_RESET_ENTRY |
+			NVGPU_PMU_GR_FEATURE_MASK_HW_SEQUENCE |
+			NVGPU_PMU_GR_FEATURE_MASK_ELPG_SRAM |
+			NVGPU_PMU_GR_FEATURE_MASK_ELPG_LOGIC |
+			NVGPU_PMU_GR_FEATURE_MASK_ELPG_L2RPPG;
 
 		gv11b_dbg_pmu("cmd post PMU_PG_CMD_SUB_FEATURE_MASK_UPDATE\n");
 		nvgpu_pmu_cmd_post(g, &cmd, NULL, NULL, PMU_COMMAND_QUEUE_HPQ,
diff --git a/drivers/gpu/nvgpu/include/nvgpu/pmuif/gpmuif_pg.h b/drivers/gpu/nvgpu/include/nvgpu/pmuif/gpmuif_pg.h
index b1077821..91656156 100644
--- a/drivers/gpu/nvgpu/include/nvgpu/pmuif/gpmuif_pg.h
+++ b/drivers/gpu/nvgpu/include/nvgpu/pmuif/gpmuif_pg.h
@@ -128,14 +128,31 @@ enum {
 #define PMU_PG_PARAM_CMD_POST_INIT	0x06
 #define PMU_PG_PARAM_CMD_SUB_FEATURE_MASK_UPDATE	0x07
 
-#define PMU_PG_FEATURE_GR_SDIV_SLOWDOWN_ENABLED	(1 << 0)
-#define PMU_PG_FEATURE_GR_POWER_GATING_ENABLED	(1 << 2)
-#define PMU_PG_FEATURE_GR_RPPG_ENABLED	(1 << 3)
+#define NVGPU_PMU_GR_FEATURE_MASK_SDIV_SLOWDOWN	(1 << 0)
+#define NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING	(1 << 2)
+#define NVGPU_PMU_GR_FEATURE_MASK_RPPG	(1 << 3)
+#define NVGPU_PMU_GR_FEATURE_MASK_PRIV_RING	(1 << 5)
+#define NVGPU_PMU_GR_FEATURE_MASK_UNBIND	(1 << 6)
+#define NVGPU_PMU_GR_FEATURE_MASK_SAVE_GLOBAL_STATE	(1 << 7)
+#define NVGPU_PMU_GR_FEATURE_MASK_RESET_ENTRY	(1 << 8)
+#define NVGPU_PMU_GR_FEATURE_MASK_HW_SEQUENCE	(1 << 9)
+#define NVGPU_PMU_GR_FEATURE_MASK_ELPG_SRAM	(1 << 10)
+#define NVGPU_PMU_GR_FEATURE_MASK_ELPG_LOGIC	(1 << 11)
+#define NVGPU_PMU_GR_FEATURE_MASK_ELPG_L2RPPG	(1 << 12)
 
-#define NVGPU_PMU_GR_FEATURE_MASK_RPPG (1 << 3)
 #define NVGPU_PMU_GR_FEATURE_MASK_ALL	\
 	(	\
-		NVGPU_PMU_GR_FEATURE_MASK_RPPG \
+		NVGPU_PMU_GR_FEATURE_MASK_SDIV_SLOWDOWN |\
+		NVGPU_PMU_GR_FEATURE_MASK_POWER_GATING |\
+		NVGPU_PMU_GR_FEATURE_MASK_RPPG |\
+		NVGPU_PMU_GR_FEATURE_MASK_PRIV_RING |\
+		NVGPU_PMU_GR_FEATURE_MASK_UNBIND |\
+		NVGPU_PMU_GR_FEATURE_MASK_SAVE_GLOBAL_STATE |\
+		NVGPU_PMU_GR_FEATURE_MASK_RESET_ENTRY |\
+		NVGPU_PMU_GR_FEATURE_MASK_HW_SEQUENCE |\
+		NVGPU_PMU_GR_FEATURE_MASK_ELPG_SRAM |\
+		NVGPU_PMU_GR_FEATURE_MASK_ELPG_LOGIC |\
+		NVGPU_PMU_GR_FEATURE_MASK_ELPG_L2RPPG \
 	)
 
 #define NVGPU_PMU_MS_FEATURE_MASK_CLOCK_GATING	(1 << 0)