author    Dmytro Laktyushkin <Dmytro.Laktyushkin@amd.com>  2018-09-13 17:42:14 -0400
committer Alex Deucher <alexander.deucher@amd.com>         2018-11-05 14:20:40 -0500
commit    24f7dd7ea98dc54fa45a0dd10c7a472e00ca01d4 (patch)
tree      0d1de64685041661d4b0b73708b85da62d461a88
parent    72942b3de8dadf92095cd14ca1aeb92c54280799 (diff)
drm/amd/display: move pplib/smu notification to dccg block
This is done to clean up the clock programming sequence, since the only time we need to notify pplib is after a clock update. This also renames the clk block to dccg; at the moment this block contains both clock management and dccg functionality.

Signed-off-by: Dmytro Laktyushkin <Dmytro.Laktyushkin@amd.com>
Reviewed-by: Tony Cheng <Tony.Cheng@amd.com>
Acked-by: Bhawanpreet Lakha <Bhawanpreet.Lakha@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
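As an illustration of the resulting calling convention, here is a minimal stand-alone sketch; the struct layouts and helper names below are simplified stand-ins, not the real DC interfaces. The hw sequencer now hands the whole dc_state to the dccg's update_clocks hook, and the dccg implementation notifies pplib itself once the clocks have been programmed, so no separate pplib_apply_display_requirements call is needed from the sequencer.

/* Minimal stand-alone sketch of the post-patch flow; every struct and
 * function here is a simplified stand-in, not the real DC interface. */
#include <stdbool.h>
#include <stdio.h>

struct dc_state;

struct dccg {
	int dispclk_khz;	/* last clock actually programmed */
	/* after this patch, update_clocks takes the whole state, not bare clocks */
	void (*update_clocks)(struct dccg *dccg, struct dc_state *ctx,
			      bool safe_to_lower);
};

struct dc_state {
	int requested_dispclk_khz;
	struct dccg *dccg;	/* renamed from dis_clk by this patch */
};

static void pplib_apply_display_requirements(const struct dc_state *ctx)
{
	/* stand-in for dm_pp_apply_display_requirements() */
	printf("notify pplib: dispclk=%d kHz\n", ctx->requested_dispclk_khz);
}

static void dce_update_clocks(struct dccg *dccg, struct dc_state *ctx,
			      bool safe_to_lower)
{
	if (ctx->requested_dispclk_khz > dccg->dispclk_khz ||
	    (safe_to_lower && ctx->requested_dispclk_khz < dccg->dispclk_khz))
		dccg->dispclk_khz = ctx->requested_dispclk_khz;

	/* pplib/smu notification now happens here, right after the clock
	 * update, instead of in the hw sequencer */
	pplib_apply_display_requirements(ctx);
}

int main(void)
{
	struct dccg dccg = { .dispclk_khz = 0, .update_clocks = dce_update_clocks };
	struct dc_state ctx = { .requested_dispclk_khz = 600000, .dccg = &dccg };

	/* callers such as set_bandwidth or enable_link_dp now only do this: */
	ctx.dccg->update_clocks(ctx.dccg, &ctx, false);
	return 0;
}

Routing the notification through update_clocks means pplib always sees the clock values that were just programmed, rather than a copy the sequencer has to keep in sync.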
-rw-r--r--  drivers/gpu/drm/amd/display/dc/core/dc.c                    |   8
-rw-r--r--  drivers/gpu/drm/amd/display/dc/core/dc_link.c               |  21
-rw-r--r--  drivers/gpu/drm/amd/display/dc/core/dc_resource.c           |   2
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dc.h                         |   5
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c             | 659
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce/dce_clocks.h             |   6
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c |  60
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c     |   4
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c | 212
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h |  10
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c     |   8
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c     |  14
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c     |   9
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c       |   4
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c   |  51
-rw-r--r--  drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c       |   2
-rw-r--r--  drivers/gpu/drm/amd/display/dc/inc/core_types.h             |   2
-rw-r--r--  drivers/gpu/drm/amd/display/dc/inc/hw/display_clock.h       |  13
-rw-r--r--  drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h           |   5
-rw-r--r--  drivers/gpu/drm/amd/display/dc/inc/resource.h               |   3
20 files changed, 475 insertions(+), 623 deletions(-)
diff --git a/drivers/gpu/drm/amd/display/dc/core/dc.c b/drivers/gpu/drm/amd/display/dc/core/dc.c
index 7c491c91465f..2bbc39de10cc 100644
--- a/drivers/gpu/drm/amd/display/dc/core/dc.c
+++ b/drivers/gpu/drm/amd/display/dc/core/dc.c
@@ -957,8 +957,6 @@ static enum dc_status dc_commit_state_no_check(struct dc *dc, struct dc_state *c
957 } 957 }
958 958
959 /* Program hardware */ 959 /* Program hardware */
960 dc->hwss.ready_shared_resources(dc, context);
961
962 for (i = 0; i < dc->res_pool->pipe_count; i++) { 960 for (i = 0; i < dc->res_pool->pipe_count; i++) {
963 pipe = &context->res_ctx.pipe_ctx[i]; 961 pipe = &context->res_ctx.pipe_ctx[i];
964 dc->hwss.wait_for_mpcc_disconnect(dc, dc->res_pool, pipe); 962 dc->hwss.wait_for_mpcc_disconnect(dc, dc->res_pool, pipe);
@@ -1020,8 +1018,6 @@ static enum dc_status dc_commit_state_no_check(struct dc *dc, struct dc_state *c
1020 1018
1021 dc_retain_state(dc->current_state); 1019 dc_retain_state(dc->current_state);
1022 1020
1023 dc->hwss.optimize_shared_resources(dc);
1024
1025 return result; 1021 return result;
1026} 1022}
1027 1023
@@ -1448,12 +1444,8 @@ static void commit_planes_do_stream_update(struct dc *dc,
1448 if (stream_update->dpms_off) { 1444 if (stream_update->dpms_off) {
1449 if (*stream_update->dpms_off) { 1445 if (*stream_update->dpms_off) {
1450 core_link_disable_stream(pipe_ctx, KEEP_ACQUIRED_RESOURCE); 1446 core_link_disable_stream(pipe_ctx, KEEP_ACQUIRED_RESOURCE);
1451 dc->hwss.pplib_apply_display_requirements(
1452 dc, dc->current_state);
1453 notify_display_count_to_smu(dc, dc->current_state); 1447 notify_display_count_to_smu(dc, dc->current_state);
1454 } else { 1448 } else {
1455 dc->hwss.pplib_apply_display_requirements(
1456 dc, dc->current_state);
1457 notify_display_count_to_smu(dc, dc->current_state); 1449 notify_display_count_to_smu(dc, dc->current_state);
1458 core_link_enable_stream(dc->current_state, pipe_ctx); 1450 core_link_enable_stream(dc->current_state, pipe_ctx);
1459 } 1451 }
diff --git a/drivers/gpu/drm/amd/display/dc/core/dc_link.c b/drivers/gpu/drm/amd/display/dc/core/dc_link.c
index fb04a4ad141f..f4936f7c5545 100644
--- a/drivers/gpu/drm/amd/display/dc/core/dc_link.c
+++ b/drivers/gpu/drm/amd/display/dc/core/dc_link.c
@@ -1357,28 +1357,13 @@ static enum dc_status enable_link_dp(
1357 struct dc_link *link = stream->sink->link; 1357 struct dc_link *link = stream->sink->link;
1358 struct dc_link_settings link_settings = {0}; 1358 struct dc_link_settings link_settings = {0};
1359 enum dp_panel_mode panel_mode; 1359 enum dp_panel_mode panel_mode;
1360 enum dc_link_rate max_link_rate = LINK_RATE_HIGH2;
1361 1360
1362 /* get link settings for video mode timing */ 1361 /* get link settings for video mode timing */
1363 decide_link_settings(stream, &link_settings); 1362 decide_link_settings(stream, &link_settings);
1364 1363
1365 /* raise clock state for HBR3 if required. Confirmed with HW DCE/DPCS 1364 pipe_ctx->stream_res.pix_clk_params.requested_sym_clk =
1366 * logic for HBR3 still needs Nominal (0.8V) on VDDC rail 1365 link_settings.link_rate * LINK_RATE_REF_FREQ_IN_KHZ;
1367 */ 1366 state->dccg->funcs->update_clocks(state->dccg, state, false);
1368 if (link->link_enc->features.flags.bits.IS_HBR3_CAPABLE)
1369 max_link_rate = LINK_RATE_HIGH3;
1370
1371 if (link_settings.link_rate == max_link_rate) {
1372 struct dc_clocks clocks = state->bw.dcn.clk;
1373
1374 /* dce/dcn compat, do not update dispclk */
1375 clocks.dispclk_khz = 0;
1376 /* 27mhz = 27000000hz= 27000khz */
1377 clocks.phyclk_khz = link_settings.link_rate * 27000;
1378
1379 state->dis_clk->funcs->update_clocks(
1380 state->dis_clk, &clocks, false);
1381 }
1382 1367
1383 dp_enable_link_phy( 1368 dp_enable_link_phy(
1384 link, 1369 link,
diff --git a/drivers/gpu/drm/amd/display/dc/core/dc_resource.c b/drivers/gpu/drm/amd/display/dc/core/dc_resource.c
index b6fe29b9fb65..b16650c6f477 100644
--- a/drivers/gpu/drm/amd/display/dc/core/dc_resource.c
+++ b/drivers/gpu/drm/amd/display/dc/core/dc_resource.c
@@ -2071,7 +2071,7 @@ void dc_resource_state_construct(
2071 const struct dc *dc, 2071 const struct dc *dc,
2072 struct dc_state *dst_ctx) 2072 struct dc_state *dst_ctx)
2073{ 2073{
2074 dst_ctx->dis_clk = dc->res_pool->dccg; 2074 dst_ctx->dccg = dc->res_pool->dccg;
2075} 2075}
2076 2076
2077enum dc_status dc_validate_global_state( 2077enum dc_status dc_validate_global_state(
diff --git a/drivers/gpu/drm/amd/display/dc/dc.h b/drivers/gpu/drm/amd/display/dc/dc.h
index 4a05f86aba12..d321e1c053e4 100644
--- a/drivers/gpu/drm/amd/display/dc/dc.h
+++ b/drivers/gpu/drm/amd/display/dc/dc.h
@@ -304,11 +304,6 @@ struct dc {
304 struct hw_sequencer_funcs hwss; 304 struct hw_sequencer_funcs hwss;
305 struct dce_hwseq *hwseq; 305 struct dce_hwseq *hwseq;
306 306
307 /* temp store of dm_pp_display_configuration
308 * to compare to see if display config changed
309 */
310 struct dm_pp_display_configuration prev_display_config;
311
312 bool optimized_required; 307 bool optimized_required;
313 308
314 /* FBC compressor */ 309 /* FBC compressor */
diff --git a/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c b/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c
index d89a097ba936..4d5a37b37919 100644
--- a/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c
+++ b/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c
@@ -23,34 +23,28 @@
23 * 23 *
24 */ 24 */
25 25
26#include "dce_clocks.h"
27#include "dm_services.h"
28#include "reg_helper.h" 26#include "reg_helper.h"
29#include "fixed31_32.h"
30#include "bios_parser_interface.h" 27#include "bios_parser_interface.h"
31#include "dc.h" 28#include "dc.h"
29#include "dce_clocks.h"
32#include "dmcu.h" 30#include "dmcu.h"
33#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
34#include "dcn_calcs.h"
35#endif
36#include "core_types.h" 31#include "core_types.h"
37#include "dc_types.h"
38#include "dal_asic_id.h" 32#include "dal_asic_id.h"
39 33
40#define TO_DCE_CLOCKS(clocks)\ 34#define TO_DCE_DCCG(clocks)\
41 container_of(clocks, struct dce_dccg, base) 35 container_of(clocks, struct dce_dccg, base)
42 36
43#define REG(reg) \ 37#define REG(reg) \
44 (clk_dce->regs->reg) 38 (dccg_dce->regs->reg)
45 39
46#undef FN 40#undef FN
47#define FN(reg_name, field_name) \ 41#define FN(reg_name, field_name) \
48 clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name 42 dccg_dce->dccg_shift->field_name, dccg_dce->dccg_mask->field_name
49 43
50#define CTX \ 44#define CTX \
51 clk_dce->base.ctx 45 dccg_dce->base.ctx
52#define DC_LOGGER \ 46#define DC_LOGGER \
53 clk->ctx->logger 47 dccg->ctx->logger
54 48
55/* Max clock values for each state indexed by "enum clocks_state": */ 49/* Max clock values for each state indexed by "enum clocks_state": */
56static const struct state_dependent_clocks dce80_max_clks_by_state[] = { 50static const struct state_dependent_clocks dce80_max_clks_by_state[] = {
@@ -157,12 +151,12 @@ static int dentist_get_divider_from_did(int did)
157 (should not be case with CIK) then SW should program all rates 151 (should not be case with CIK) then SW should program all rates
158 generated according to average value (case as with previous ASICs) 152 generated according to average value (case as with previous ASICs)
159 */ 153 */
160static int dccg_adjust_dp_ref_freq_for_ss(struct dce_dccg *clk_dce, int dp_ref_clk_khz) 154static int dccg_adjust_dp_ref_freq_for_ss(struct dce_dccg *dccg_dce, int dp_ref_clk_khz)
161{ 155{
162 if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) { 156 if (dccg_dce->ss_on_dprefclk && dccg_dce->dprefclk_ss_divider != 0) {
163 struct fixed31_32 ss_percentage = dc_fixpt_div_int( 157 struct fixed31_32 ss_percentage = dc_fixpt_div_int(
164 dc_fixpt_from_fraction(clk_dce->dprefclk_ss_percentage, 158 dc_fixpt_from_fraction(dccg_dce->dprefclk_ss_percentage,
165 clk_dce->dprefclk_ss_divider), 200); 159 dccg_dce->dprefclk_ss_divider), 200);
166 struct fixed31_32 adj_dp_ref_clk_khz; 160 struct fixed31_32 adj_dp_ref_clk_khz;
167 161
168 ss_percentage = dc_fixpt_sub(dc_fixpt_one, ss_percentage); 162 ss_percentage = dc_fixpt_sub(dc_fixpt_one, ss_percentage);
@@ -172,9 +166,9 @@ static int dccg_adjust_dp_ref_freq_for_ss(struct dce_dccg *clk_dce, int dp_ref_c
172 return dp_ref_clk_khz; 166 return dp_ref_clk_khz;
173} 167}
174 168
175static int dce_get_dp_ref_freq_khz(struct dccg *clk) 169static int dce_get_dp_ref_freq_khz(struct dccg *dccg)
176{ 170{
177 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk); 171 struct dce_dccg *dccg_dce = TO_DCE_DCCG(dccg);
178 int dprefclk_wdivider; 172 int dprefclk_wdivider;
179 int dprefclk_src_sel; 173 int dprefclk_src_sel;
180 int dp_ref_clk_khz = 600000; 174 int dp_ref_clk_khz = 600000;
@@ -193,76 +187,110 @@ static int dce_get_dp_ref_freq_khz(struct dccg *clk)
193 187
194 /* Calculate the current DFS clock, in kHz.*/ 188 /* Calculate the current DFS clock, in kHz.*/
195 dp_ref_clk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR 189 dp_ref_clk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
196 * clk_dce->dentist_vco_freq_khz) / target_div; 190 * dccg_dce->dentist_vco_freq_khz) / target_div;
197 191
198 return dccg_adjust_dp_ref_freq_for_ss(clk_dce, dp_ref_clk_khz); 192 return dccg_adjust_dp_ref_freq_for_ss(dccg_dce, dp_ref_clk_khz);
199} 193}
200 194
201static int dce12_get_dp_ref_freq_khz(struct dccg *clk) 195static int dce12_get_dp_ref_freq_khz(struct dccg *dccg)
202{ 196{
203 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk); 197 struct dce_dccg *dccg_dce = TO_DCE_DCCG(dccg);
204 198
205 return dccg_adjust_dp_ref_freq_for_ss(clk_dce, clk_dce->dprefclk_khz); 199 return dccg_adjust_dp_ref_freq_for_ss(dccg_dce, dccg_dce->dprefclk_khz);
200}
201
202/* unit: in_khz before mode set, get pixel clock from context. ASIC register
203 * may not be programmed yet
204 */
205static uint32_t get_max_pixel_clock_for_all_paths(struct dc_state *context)
206{
207 uint32_t max_pix_clk = 0;
208 int i;
209
210 for (i = 0; i < MAX_PIPES; i++) {
211 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
212
213 if (pipe_ctx->stream == NULL)
214 continue;
215
216 /* do not check under lay */
217 if (pipe_ctx->top_pipe)
218 continue;
219
220 if (pipe_ctx->stream_res.pix_clk_params.requested_pix_clk > max_pix_clk)
221 max_pix_clk = pipe_ctx->stream_res.pix_clk_params.requested_pix_clk;
222
223 /* raise clock state for HBR3/2 if required. Confirmed with HW DCE/DPCS
224 * logic for HBR3 still needs Nominal (0.8V) on VDDC rail
225 */
226 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
227 pipe_ctx->stream_res.pix_clk_params.requested_sym_clk > max_pix_clk)
228 max_pix_clk = pipe_ctx->stream_res.pix_clk_params.requested_sym_clk;
229 }
230
231 return max_pix_clk;
206} 232}
207 233
208static enum dm_pp_clocks_state dce_get_required_clocks_state( 234static enum dm_pp_clocks_state dce_get_required_clocks_state(
209 struct dccg *clk, 235 struct dccg *dccg,
210 struct dc_clocks *req_clocks) 236 struct dc_state *context)
211{ 237{
212 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk); 238 struct dce_dccg *dccg_dce = TO_DCE_DCCG(dccg);
213 int i; 239 int i;
214 enum dm_pp_clocks_state low_req_clk; 240 enum dm_pp_clocks_state low_req_clk;
241 int max_pix_clk = get_max_pixel_clock_for_all_paths(context);
215 242
216 /* Iterate from highest supported to lowest valid state, and update 243 /* Iterate from highest supported to lowest valid state, and update
217 * lowest RequiredState with the lowest state that satisfies 244 * lowest RequiredState with the lowest state that satisfies
218 * all required clocks 245 * all required clocks
219 */ 246 */
220 for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--) 247 for (i = dccg->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
221 if (req_clocks->dispclk_khz > 248 if (context->bw.dce.dispclk_khz >
222 clk_dce->max_clks_by_state[i].display_clk_khz 249 dccg_dce->max_clks_by_state[i].display_clk_khz
223 || req_clocks->phyclk_khz > 250 || max_pix_clk >
224 clk_dce->max_clks_by_state[i].pixel_clk_khz) 251 dccg_dce->max_clks_by_state[i].pixel_clk_khz)
225 break; 252 break;
226 253
227 low_req_clk = i + 1; 254 low_req_clk = i + 1;
228 if (low_req_clk > clk->max_clks_state) { 255 if (low_req_clk > dccg->max_clks_state) {
229 /* set max clock state for high phyclock, invalid on exceeding display clock */ 256 /* set max clock state for high phyclock, invalid on exceeding display clock */
230 if (clk_dce->max_clks_by_state[clk->max_clks_state].display_clk_khz 257 if (dccg_dce->max_clks_by_state[dccg->max_clks_state].display_clk_khz
231 < req_clocks->dispclk_khz) 258 < context->bw.dce.dispclk_khz)
232 low_req_clk = DM_PP_CLOCKS_STATE_INVALID; 259 low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
233 else 260 else
234 low_req_clk = clk->max_clks_state; 261 low_req_clk = dccg->max_clks_state;
235 } 262 }
236 263
237 return low_req_clk; 264 return low_req_clk;
238} 265}
239 266
240static int dce_set_clock( 267static int dce_set_clock(
241 struct dccg *clk, 268 struct dccg *dccg,
242 int requested_clk_khz) 269 int requested_clk_khz)
243{ 270{
244 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk); 271 struct dce_dccg *dccg_dce = TO_DCE_DCCG(dccg);
245 struct bp_pixel_clock_parameters pxl_clk_params = { 0 }; 272 struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
246 struct dc_bios *bp = clk->ctx->dc_bios; 273 struct dc_bios *bp = dccg->ctx->dc_bios;
247 int actual_clock = requested_clk_khz; 274 int actual_clock = requested_clk_khz;
275 struct dmcu *dmcu = dccg_dce->base.ctx->dc->res_pool->dmcu;
248 276
249 /* Make sure requested clock isn't lower than minimum threshold*/ 277 /* Make sure requested clock isn't lower than minimum threshold*/
250 if (requested_clk_khz > 0) 278 if (requested_clk_khz > 0)
251 requested_clk_khz = max(requested_clk_khz, 279 requested_clk_khz = max(requested_clk_khz,
252 clk_dce->dentist_vco_freq_khz / 64); 280 dccg_dce->dentist_vco_freq_khz / 64);
253 281
254 /* Prepare to program display clock*/ 282 /* Prepare to program display clock*/
255 pxl_clk_params.target_pixel_clock = requested_clk_khz; 283 pxl_clk_params.target_pixel_clock = requested_clk_khz;
256 pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS; 284 pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
257 285
258 if (clk_dce->dfs_bypass_active) 286 if (dccg_dce->dfs_bypass_active)
259 pxl_clk_params.flags.SET_DISPCLK_DFS_BYPASS = true; 287 pxl_clk_params.flags.SET_DISPCLK_DFS_BYPASS = true;
260 288
261 bp->funcs->program_display_engine_pll(bp, &pxl_clk_params); 289 bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);
262 290
263 if (clk_dce->dfs_bypass_active) { 291 if (dccg_dce->dfs_bypass_active) {
264 /* Cache the fixed display clock*/ 292 /* Cache the fixed display clock*/
265 clk_dce->dfs_bypass_disp_clk = 293 dccg_dce->dfs_bypass_disp_clk =
266 pxl_clk_params.dfs_bypass_display_clock; 294 pxl_clk_params.dfs_bypass_display_clock;
267 actual_clock = pxl_clk_params.dfs_bypass_display_clock; 295 actual_clock = pxl_clk_params.dfs_bypass_display_clock;
268 } 296 }
@@ -270,34 +298,21 @@ static int dce_set_clock(
270 /* from power down, we need mark the clock state as ClocksStateNominal 298 /* from power down, we need mark the clock state as ClocksStateNominal
271 * from HWReset, so when resume we will call pplib voltage regulator.*/ 299 * from HWReset, so when resume we will call pplib voltage regulator.*/
272 if (requested_clk_khz == 0) 300 if (requested_clk_khz == 0)
273 clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL; 301 dccg->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
274 return actual_clock;
275}
276
277static int dce_psr_set_clock(
278 struct dccg *clk,
279 int requested_clk_khz)
280{
281 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
282 struct dc_context *ctx = clk_dce->base.ctx;
283 struct dc *core_dc = ctx->dc;
284 struct dmcu *dmcu = core_dc->res_pool->dmcu;
285 int actual_clk_khz = requested_clk_khz;
286 302
287 actual_clk_khz = dce_set_clock(clk, requested_clk_khz); 303 dmcu->funcs->set_psr_wait_loop(dmcu, actual_clock / 1000 / 7);
288 304
289 dmcu->funcs->set_psr_wait_loop(dmcu, actual_clk_khz / 1000 / 7); 305 return actual_clock;
290 return actual_clk_khz;
291} 306}
292 307
293static int dce112_set_clock( 308static int dce112_set_clock(
294 struct dccg *clk, 309 struct dccg *dccg,
295 int requested_clk_khz) 310 int requested_clk_khz)
296{ 311{
297 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk); 312 struct dce_dccg *dccg_dce = TO_DCE_DCCG(dccg);
298 struct bp_set_dce_clock_parameters dce_clk_params; 313 struct bp_set_dce_clock_parameters dce_clk_params;
299 struct dc_bios *bp = clk->ctx->dc_bios; 314 struct dc_bios *bp = dccg->ctx->dc_bios;
300 struct dc *core_dc = clk->ctx->dc; 315 struct dc *core_dc = dccg->ctx->dc;
301 struct dmcu *dmcu = core_dc->res_pool->dmcu; 316 struct dmcu *dmcu = core_dc->res_pool->dmcu;
302 int actual_clock = requested_clk_khz; 317 int actual_clock = requested_clk_khz;
303 /* Prepare to program display clock*/ 318 /* Prepare to program display clock*/
@@ -306,7 +321,7 @@ static int dce112_set_clock(
306 /* Make sure requested clock isn't lower than minimum threshold*/ 321 /* Make sure requested clock isn't lower than minimum threshold*/
307 if (requested_clk_khz > 0) 322 if (requested_clk_khz > 0)
308 requested_clk_khz = max(requested_clk_khz, 323 requested_clk_khz = max(requested_clk_khz,
309 clk_dce->dentist_vco_freq_khz / 62); 324 dccg_dce->dentist_vco_freq_khz / 62);
310 325
311 dce_clk_params.target_clock_frequency = requested_clk_khz; 326 dce_clk_params.target_clock_frequency = requested_clk_khz;
312 dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS; 327 dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
@@ -318,13 +333,13 @@ static int dce112_set_clock(
318 /* from power down, we need mark the clock state as ClocksStateNominal 333 /* from power down, we need mark the clock state as ClocksStateNominal
319 * from HWReset, so when resume we will call pplib voltage regulator.*/ 334 * from HWReset, so when resume we will call pplib voltage regulator.*/
320 if (requested_clk_khz == 0) 335 if (requested_clk_khz == 0)
321 clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL; 336 dccg->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
322 337
323 /*Program DP ref Clock*/ 338 /*Program DP ref Clock*/
324 /*VBIOS will determine DPREFCLK frequency, so we don't set it*/ 339 /*VBIOS will determine DPREFCLK frequency, so we don't set it*/
325 dce_clk_params.target_clock_frequency = 0; 340 dce_clk_params.target_clock_frequency = 0;
326 dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK; 341 dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
327 if (!ASICREV_IS_VEGA20_P(clk->ctx->asic_id.hw_internal_rev)) 342 if (!ASICREV_IS_VEGA20_P(dccg->ctx->asic_id.hw_internal_rev))
328 dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK = 343 dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
329 (dce_clk_params.pll_id == 344 (dce_clk_params.pll_id ==
330 CLOCK_SOURCE_COMBO_DISPLAY_PLL0); 345 CLOCK_SOURCE_COMBO_DISPLAY_PLL0);
@@ -334,19 +349,19 @@ static int dce112_set_clock(
334 bp->funcs->set_dce_clock(bp, &dce_clk_params); 349 bp->funcs->set_dce_clock(bp, &dce_clk_params);
335 350
336 if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) { 351 if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
337 if (clk_dce->dfs_bypass_disp_clk != actual_clock) 352 if (dccg_dce->dfs_bypass_disp_clk != actual_clock)
338 dmcu->funcs->set_psr_wait_loop(dmcu, 353 dmcu->funcs->set_psr_wait_loop(dmcu,
339 actual_clock / 1000 / 7); 354 actual_clock / 1000 / 7);
340 } 355 }
341 356
342 clk_dce->dfs_bypass_disp_clk = actual_clock; 357 dccg_dce->dfs_bypass_disp_clk = actual_clock;
343 return actual_clock; 358 return actual_clock;
344} 359}
345 360
346static void dce_clock_read_integrated_info(struct dce_dccg *clk_dce) 361static void dce_clock_read_integrated_info(struct dce_dccg *dccg_dce)
347{ 362{
348 struct dc_debug_options *debug = &clk_dce->base.ctx->dc->debug; 363 struct dc_debug_options *debug = &dccg_dce->base.ctx->dc->debug;
349 struct dc_bios *bp = clk_dce->base.ctx->dc_bios; 364 struct dc_bios *bp = dccg_dce->base.ctx->dc_bios;
350 struct integrated_info info = { { { 0 } } }; 365 struct integrated_info info = { { { 0 } } };
351 struct dc_firmware_info fw_info = { { 0 } }; 366 struct dc_firmware_info fw_info = { { 0 } };
352 int i; 367 int i;
@@ -354,13 +369,13 @@ static void dce_clock_read_integrated_info(struct dce_dccg *clk_dce)
354 if (bp->integrated_info) 369 if (bp->integrated_info)
355 info = *bp->integrated_info; 370 info = *bp->integrated_info;
356 371
357 clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq; 372 dccg_dce->dentist_vco_freq_khz = info.dentist_vco_freq;
358 if (clk_dce->dentist_vco_freq_khz == 0) { 373 if (dccg_dce->dentist_vco_freq_khz == 0) {
359 bp->funcs->get_firmware_info(bp, &fw_info); 374 bp->funcs->get_firmware_info(bp, &fw_info);
360 clk_dce->dentist_vco_freq_khz = 375 dccg_dce->dentist_vco_freq_khz =
361 fw_info.smu_gpu_pll_output_freq; 376 fw_info.smu_gpu_pll_output_freq;
362 if (clk_dce->dentist_vco_freq_khz == 0) 377 if (dccg_dce->dentist_vco_freq_khz == 0)
363 clk_dce->dentist_vco_freq_khz = 3600000; 378 dccg_dce->dentist_vco_freq_khz = 3600000;
364 } 379 }
365 380
366 /*update the maximum display clock for each power state*/ 381 /*update the maximum display clock for each power state*/
@@ -392,18 +407,18 @@ static void dce_clock_read_integrated_info(struct dce_dccg *clk_dce)
392 /*Do not allow bad VBIOS/SBIOS to override with invalid values, 407 /*Do not allow bad VBIOS/SBIOS to override with invalid values,
393 * check for > 100MHz*/ 408 * check for > 100MHz*/
394 if (info.disp_clk_voltage[i].max_supported_clk >= 100000) 409 if (info.disp_clk_voltage[i].max_supported_clk >= 100000)
395 clk_dce->max_clks_by_state[clk_state].display_clk_khz = 410 dccg_dce->max_clks_by_state[clk_state].display_clk_khz =
396 info.disp_clk_voltage[i].max_supported_clk; 411 info.disp_clk_voltage[i].max_supported_clk;
397 } 412 }
398 413
399 if (!debug->disable_dfs_bypass && bp->integrated_info) 414 if (!debug->disable_dfs_bypass && bp->integrated_info)
400 if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE) 415 if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
401 clk_dce->dfs_bypass_enabled = true; 416 dccg_dce->dfs_bypass_enabled = true;
402} 417}
403 418
404static void dce_clock_read_ss_info(struct dce_dccg *clk_dce) 419static void dce_clock_read_ss_info(struct dce_dccg *dccg_dce)
405{ 420{
406 struct dc_bios *bp = clk_dce->base.ctx->dc_bios; 421 struct dc_bios *bp = dccg_dce->base.ctx->dc_bios;
407 int ss_info_num = bp->funcs->get_ss_entry_number( 422 int ss_info_num = bp->funcs->get_ss_entry_number(
408 bp, AS_SIGNAL_TYPE_GPU_PLL); 423 bp, AS_SIGNAL_TYPE_GPU_PLL);
409 424
@@ -419,14 +434,14 @@ static void dce_clock_read_ss_info(struct dce_dccg *clk_dce)
419 */ 434 */
420 if (result == BP_RESULT_OK && 435 if (result == BP_RESULT_OK &&
421 info.spread_spectrum_percentage != 0) { 436 info.spread_spectrum_percentage != 0) {
422 clk_dce->ss_on_dprefclk = true; 437 dccg_dce->ss_on_dprefclk = true;
423 clk_dce->dprefclk_ss_divider = info.spread_percentage_divider; 438 dccg_dce->dprefclk_ss_divider = info.spread_percentage_divider;
424 439
425 if (info.type.CENTER_MODE == 0) { 440 if (info.type.CENTER_MODE == 0) {
426 /* TODO: Currently for DP Reference clock we 441 /* TODO: Currently for DP Reference clock we
427 * need only SS percentage for 442 * need only SS percentage for
428 * downspread */ 443 * downspread */
429 clk_dce->dprefclk_ss_percentage = 444 dccg_dce->dprefclk_ss_percentage =
430 info.spread_spectrum_percentage; 445 info.spread_spectrum_percentage;
431 } 446 }
432 447
@@ -443,14 +458,14 @@ static void dce_clock_read_ss_info(struct dce_dccg *clk_dce)
443 */ 458 */
444 if (result == BP_RESULT_OK && 459 if (result == BP_RESULT_OK &&
445 info.spread_spectrum_percentage != 0) { 460 info.spread_spectrum_percentage != 0) {
446 clk_dce->ss_on_dprefclk = true; 461 dccg_dce->ss_on_dprefclk = true;
447 clk_dce->dprefclk_ss_divider = info.spread_percentage_divider; 462 dccg_dce->dprefclk_ss_divider = info.spread_percentage_divider;
448 463
449 if (info.type.CENTER_MODE == 0) { 464 if (info.type.CENTER_MODE == 0) {
450 /* Currently for DP Reference clock we 465 /* Currently for DP Reference clock we
451 * need only SS percentage for 466 * need only SS percentage for
452 * downspread */ 467 * downspread */
453 clk_dce->dprefclk_ss_percentage = 468 dccg_dce->dprefclk_ss_percentage =
454 info.spread_spectrum_percentage; 469 info.spread_spectrum_percentage;
455 } 470 }
456 } 471 }
@@ -462,31 +477,189 @@ static inline bool should_set_clock(bool safe_to_lower, int calc_clk, int cur_cl
462 return ((safe_to_lower && calc_clk < cur_clk) || calc_clk > cur_clk); 477 return ((safe_to_lower && calc_clk < cur_clk) || calc_clk > cur_clk);
463} 478}
464 479
465static void dce12_update_clocks(struct dccg *dccg, 480static void dce110_fill_display_configs(
466 struct dc_clocks *new_clocks, 481 const struct dc_state *context,
467 bool safe_to_lower) 482 struct dm_pp_display_configuration *pp_display_cfg)
468{ 483{
469 struct dm_pp_clock_for_voltage_req clock_voltage_req = {0}; 484 int j;
485 int num_cfgs = 0;
470 486
471 /* TODO: Investigate why this is needed to fix display corruption. */ 487 for (j = 0; j < context->stream_count; j++) {
472 new_clocks->dispclk_khz = new_clocks->dispclk_khz * 115 / 100; 488 int k;
473 489
474 if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)) { 490 const struct dc_stream_state *stream = context->streams[j];
475 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK; 491 struct dm_pp_single_disp_config *cfg =
476 clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz; 492 &pp_display_cfg->disp_configs[num_cfgs];
477 new_clocks->dispclk_khz = dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz); 493 const struct pipe_ctx *pipe_ctx = NULL;
478 dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
479 494
480 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req); 495 for (k = 0; k < MAX_PIPES; k++)
496 if (stream == context->res_ctx.pipe_ctx[k].stream) {
497 pipe_ctx = &context->res_ctx.pipe_ctx[k];
498 break;
499 }
500
501 ASSERT(pipe_ctx != NULL);
502
503 /* only notify active stream */
504 if (stream->dpms_off)
505 continue;
506
507 num_cfgs++;
508 cfg->signal = pipe_ctx->stream->signal;
509 cfg->pipe_idx = pipe_ctx->stream_res.tg->inst;
510 cfg->src_height = stream->src.height;
511 cfg->src_width = stream->src.width;
512 cfg->ddi_channel_mapping =
513 stream->sink->link->ddi_channel_mapping.raw;
514 cfg->transmitter =
515 stream->sink->link->link_enc->transmitter;
516 cfg->link_settings.lane_count =
517 stream->sink->link->cur_link_settings.lane_count;
518 cfg->link_settings.link_rate =
519 stream->sink->link->cur_link_settings.link_rate;
520 cfg->link_settings.link_spread =
521 stream->sink->link->cur_link_settings.link_spread;
522 cfg->sym_clock = stream->phy_pix_clk;
523 /* Round v_refresh*/
524 cfg->v_refresh = stream->timing.pix_clk_khz * 1000;
525 cfg->v_refresh /= stream->timing.h_total;
526 cfg->v_refresh = (cfg->v_refresh + stream->timing.v_total / 2)
527 / stream->timing.v_total;
481 } 528 }
482 529
483 if (should_set_clock(safe_to_lower, new_clocks->phyclk_khz, dccg->clks.phyclk_khz)) { 530 pp_display_cfg->display_count = num_cfgs;
484 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK; 531}
485 clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
486 dccg->clks.phyclk_khz = new_clocks->phyclk_khz;
487 532
488 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req); 533static uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context)
534{
535 uint8_t j;
536 uint32_t min_vertical_blank_time = -1;
537
538 for (j = 0; j < context->stream_count; j++) {
539 struct dc_stream_state *stream = context->streams[j];
540 uint32_t vertical_blank_in_pixels = 0;
541 uint32_t vertical_blank_time = 0;
542
543 vertical_blank_in_pixels = stream->timing.h_total *
544 (stream->timing.v_total
545 - stream->timing.v_addressable);
546
547 vertical_blank_time = vertical_blank_in_pixels
548 * 1000 / stream->timing.pix_clk_khz;
549
550 if (min_vertical_blank_time > vertical_blank_time)
551 min_vertical_blank_time = vertical_blank_time;
552 }
553
554 return min_vertical_blank_time;
555}
556
557static int determine_sclk_from_bounding_box(
558 const struct dc *dc,
559 int required_sclk)
560{
561 int i;
562
563 /*
564 * Some asics do not give us sclk levels, so we just report the actual
565 * required sclk
566 */
567 if (dc->sclk_lvls.num_levels == 0)
568 return required_sclk;
569
570 for (i = 0; i < dc->sclk_lvls.num_levels; i++) {
571 if (dc->sclk_lvls.clocks_in_khz[i] >= required_sclk)
572 return dc->sclk_lvls.clocks_in_khz[i];
573 }
574 /*
575 * even maximum level could not satisfy requirement, this
576 * is unexpected at this stage, should have been caught at
577 * validation time
578 */
579 ASSERT(0);
580 return dc->sclk_lvls.clocks_in_khz[dc->sclk_lvls.num_levels - 1];
581}
582
583static void dce_pplib_apply_display_requirements(
584 struct dc *dc,
585 struct dc_state *context)
586{
587 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
588
589 pp_display_cfg->avail_mclk_switch_time_us = dce110_get_min_vblank_time_us(context);
590
591 dce110_fill_display_configs(context, pp_display_cfg);
592
593 if (memcmp(&dc->current_state->pp_display_cfg, pp_display_cfg, sizeof(*pp_display_cfg)) != 0)
594 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
595}
596
597static void dce11_pplib_apply_display_requirements(
598 struct dc *dc,
599 struct dc_state *context)
600{
601 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
602
603 pp_display_cfg->all_displays_in_sync =
604 context->bw.dce.all_displays_in_sync;
605 pp_display_cfg->nb_pstate_switch_disable =
606 context->bw.dce.nbp_state_change_enable == false;
607 pp_display_cfg->cpu_cc6_disable =
608 context->bw.dce.cpuc_state_change_enable == false;
609 pp_display_cfg->cpu_pstate_disable =
610 context->bw.dce.cpup_state_change_enable == false;
611 pp_display_cfg->cpu_pstate_separation_time =
612 context->bw.dce.blackout_recovery_time_us;
613
614 pp_display_cfg->min_memory_clock_khz = context->bw.dce.yclk_khz
615 / MEMORY_TYPE_MULTIPLIER_CZ;
616
617 pp_display_cfg->min_engine_clock_khz = determine_sclk_from_bounding_box(
618 dc,
619 context->bw.dce.sclk_khz);
620
621 pp_display_cfg->min_engine_clock_deep_sleep_khz
622 = context->bw.dce.sclk_deep_sleep_khz;
623
624 pp_display_cfg->avail_mclk_switch_time_us =
625 dce110_get_min_vblank_time_us(context);
626 /* TODO: dce11.2*/
627 pp_display_cfg->avail_mclk_switch_time_in_disp_active_us = 0;
628
629 pp_display_cfg->disp_clk_khz = dc->res_pool->dccg->clks.dispclk_khz;
630
631 dce110_fill_display_configs(context, pp_display_cfg);
632
633 /* TODO: is this still applicable?*/
634 if (pp_display_cfg->display_count == 1) {
635 const struct dc_crtc_timing *timing =
636 &context->streams[0]->timing;
637
638 pp_display_cfg->crtc_index =
639 pp_display_cfg->disp_configs[0].pipe_idx;
640 pp_display_cfg->line_time_in_us = timing->h_total * 1000 / timing->pix_clk_khz;
489 } 641 }
642
643 if (memcmp(&dc->current_state->pp_display_cfg, pp_display_cfg, sizeof(*pp_display_cfg)) != 0)
644 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
645}
646
647static void dcn1_pplib_apply_display_requirements(
648 struct dc *dc,
649 struct dc_state *context)
650{
651 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
652
653 pp_display_cfg->min_engine_clock_khz = dc->res_pool->dccg->clks.dcfclk_khz;
654 pp_display_cfg->min_memory_clock_khz = dc->res_pool->dccg->clks.fclk_khz;
655 pp_display_cfg->min_engine_clock_deep_sleep_khz = dc->res_pool->dccg->clks.dcfclk_deep_sleep_khz;
656 pp_display_cfg->min_dcfc_deep_sleep_clock_khz = dc->res_pool->dccg->clks.dcfclk_deep_sleep_khz;
657 pp_display_cfg->min_dcfclock_khz = dc->res_pool->dccg->clks.dcfclk_khz;
658 pp_display_cfg->disp_clk_khz = dc->res_pool->dccg->clks.dispclk_khz;
659 dce110_fill_display_configs(context, pp_display_cfg);
660
661 if (memcmp(&dc->current_state->pp_display_cfg, pp_display_cfg, sizeof(*pp_display_cfg)) != 0)
662 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
490} 663}
491 664
492#ifdef CONFIG_DRM_AMD_DC_DCN1_0 665#ifdef CONFIG_DRM_AMD_DC_DCN1_0
@@ -544,7 +717,7 @@ static void dcn1_ramp_up_dispclk_with_dpp(struct dccg *dccg, struct dc_clocks *n
544 int i; 717 int i;
545 718
546 /* set disp clk to dpp clk threshold */ 719 /* set disp clk to dpp clk threshold */
547 dccg->funcs->set_dispclk(dccg, dispclk_to_dpp_threshold); 720 dce112_set_clock(dccg, dispclk_to_dpp_threshold);
548 721
549 /* update request dpp clk division option */ 722 /* update request dpp clk division option */
550 for (i = 0; i < dc->res_pool->pipe_count; i++) { 723 for (i = 0; i < dc->res_pool->pipe_count; i++) {
@@ -561,7 +734,7 @@ static void dcn1_ramp_up_dispclk_with_dpp(struct dccg *dccg, struct dc_clocks *n
561 734
562 /* If target clk not same as dppclk threshold, set to target clock */ 735 /* If target clk not same as dppclk threshold, set to target clock */
563 if (dispclk_to_dpp_threshold != new_clocks->dispclk_khz) 736 if (dispclk_to_dpp_threshold != new_clocks->dispclk_khz)
564 dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz); 737 dce112_set_clock(dccg, new_clocks->dispclk_khz);
565 738
566 dccg->clks.dispclk_khz = new_clocks->dispclk_khz; 739 dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
567 dccg->clks.dppclk_khz = new_clocks->dppclk_khz; 740 dccg->clks.dppclk_khz = new_clocks->dppclk_khz;
@@ -569,10 +742,11 @@ static void dcn1_ramp_up_dispclk_with_dpp(struct dccg *dccg, struct dc_clocks *n
569} 742}
570 743
571static void dcn1_update_clocks(struct dccg *dccg, 744static void dcn1_update_clocks(struct dccg *dccg,
572 struct dc_clocks *new_clocks, 745 struct dc_state *context,
573 bool safe_to_lower) 746 bool safe_to_lower)
574{ 747{
575 struct dc *dc = dccg->ctx->dc; 748 struct dc *dc = dccg->ctx->dc;
749 struct dc_clocks *new_clocks = &context->bw.dcn.clk;
576 struct pp_smu_display_requirement_rv *smu_req_cur = 750 struct pp_smu_display_requirement_rv *smu_req_cur =
577 &dc->res_pool->pp_smu_req; 751 &dc->res_pool->pp_smu_req;
578 struct pp_smu_display_requirement_rv smu_req = *smu_req_cur; 752 struct pp_smu_display_requirement_rv smu_req = *smu_req_cur;
@@ -633,6 +807,7 @@ static void dcn1_update_clocks(struct dccg *dccg,
633 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req); 807 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
634 if (pp_smu->set_display_requirement) 808 if (pp_smu->set_display_requirement)
635 pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req); 809 pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
810 dcn1_pplib_apply_display_requirements(dc, context);
636 } 811 }
637 812
638 /* dcn1 dppclk is tied to dispclk */ 813 /* dcn1 dppclk is tied to dispclk */
@@ -652,6 +827,7 @@ static void dcn1_update_clocks(struct dccg *dccg,
652 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req); 827 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
653 if (pp_smu->set_display_requirement) 828 if (pp_smu->set_display_requirement)
654 pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req); 829 pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
830 dcn1_pplib_apply_display_requirements(dc, context);
655 } 831 }
656 832
657 833
@@ -660,17 +836,18 @@ static void dcn1_update_clocks(struct dccg *dccg,
660#endif 836#endif
661 837
662static void dce_update_clocks(struct dccg *dccg, 838static void dce_update_clocks(struct dccg *dccg,
663 struct dc_clocks *new_clocks, 839 struct dc_state *context,
664 bool safe_to_lower) 840 bool safe_to_lower)
665{ 841{
666 struct dm_pp_power_level_change_request level_change_req; 842 struct dm_pp_power_level_change_request level_change_req;
667 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(dccg); 843 int unpatched_disp_clk = context->bw.dce.dispclk_khz;
844 struct dce_dccg *dccg_dce = TO_DCE_DCCG(dccg);
668 845
669 /* TODO: Investigate why this is needed to fix display corruption. */ 846 /*TODO: W/A for dal3 linux, investigate why this works */
670 if (!clk_dce->dfs_bypass_active) 847 if (!dccg_dce->dfs_bypass_active)
671 new_clocks->dispclk_khz = new_clocks->dispclk_khz * 115 / 100; 848 context->bw.dce.dispclk_khz = context->bw.dce.dispclk_khz * 115 / 100;
672 849
673 level_change_req.power_level = dce_get_required_clocks_state(dccg, new_clocks); 850 level_change_req.power_level = dce_get_required_clocks_state(dccg, context);
674 /* get max clock state from PPLIB */ 851 /* get max clock state from PPLIB */
675 if ((level_change_req.power_level < dccg->cur_min_clks_state && safe_to_lower) 852 if ((level_change_req.power_level < dccg->cur_min_clks_state && safe_to_lower)
676 || level_change_req.power_level > dccg->cur_min_clks_state) { 853 || level_change_req.power_level > dccg->cur_min_clks_state) {
@@ -678,127 +855,143 @@ static void dce_update_clocks(struct dccg *dccg,
678 dccg->cur_min_clks_state = level_change_req.power_level; 855 dccg->cur_min_clks_state = level_change_req.power_level;
679 } 856 }
680 857
681 if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)) { 858 if (should_set_clock(safe_to_lower, context->bw.dce.dispclk_khz, dccg->clks.dispclk_khz)) {
682 new_clocks->dispclk_khz = dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz); 859 context->bw.dce.dispclk_khz = dce_set_clock(dccg, context->bw.dce.dispclk_khz);
683 dccg->clks.dispclk_khz = new_clocks->dispclk_khz; 860 dccg->clks.dispclk_khz = context->bw.dce.dispclk_khz;
861 }
862 dce_pplib_apply_display_requirements(dccg->ctx->dc, context);
863
864 context->bw.dce.dispclk_khz = unpatched_disp_clk;
865}
866
867static void dce11_update_clocks(struct dccg *dccg,
868 struct dc_state *context,
869 bool safe_to_lower)
870{
871 struct dm_pp_power_level_change_request level_change_req;
872
873 level_change_req.power_level = dce_get_required_clocks_state(dccg, context);
874 /* get max clock state from PPLIB */
875 if ((level_change_req.power_level < dccg->cur_min_clks_state && safe_to_lower)
876 || level_change_req.power_level > dccg->cur_min_clks_state) {
877 if (dm_pp_apply_power_level_change_request(dccg->ctx, &level_change_req))
878 dccg->cur_min_clks_state = level_change_req.power_level;
879 }
880
881 if (should_set_clock(safe_to_lower, context->bw.dce.dispclk_khz, dccg->clks.dispclk_khz)) {
882 context->bw.dce.dispclk_khz = dce_set_clock(dccg, context->bw.dce.dispclk_khz);
883 dccg->clks.dispclk_khz = context->bw.dce.dispclk_khz;
884 }
885 dce11_pplib_apply_display_requirements(dccg->ctx->dc, context);
886}
887
888static void dce112_update_clocks(struct dccg *dccg,
889 struct dc_state *context,
890 bool safe_to_lower)
891{
892 struct dm_pp_power_level_change_request level_change_req;
893
894 level_change_req.power_level = dce_get_required_clocks_state(dccg, context);
895 /* get max clock state from PPLIB */
896 if ((level_change_req.power_level < dccg->cur_min_clks_state && safe_to_lower)
897 || level_change_req.power_level > dccg->cur_min_clks_state) {
898 if (dm_pp_apply_power_level_change_request(dccg->ctx, &level_change_req))
899 dccg->cur_min_clks_state = level_change_req.power_level;
684 } 900 }
901
902 if (should_set_clock(safe_to_lower, context->bw.dce.dispclk_khz, dccg->clks.dispclk_khz)) {
903 context->bw.dce.dispclk_khz = dce112_set_clock(dccg, context->bw.dce.dispclk_khz);
904 dccg->clks.dispclk_khz = context->bw.dce.dispclk_khz;
905 }
906 dce11_pplib_apply_display_requirements(dccg->ctx->dc, context);
685} 907}
686 908
687static bool dce_update_dfs_bypass( 909static void dce12_update_clocks(struct dccg *dccg,
688 struct dccg *dccg, 910 struct dc_state *context,
689 struct dc *dc, 911 bool safe_to_lower)
690 struct dc_state *context,
691 int requested_clock_khz)
692{ 912{
693 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(dccg); 913 struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
694 struct resource_context *res_ctx = &context->res_ctx; 914 int max_pix_clk = get_max_pixel_clock_for_all_paths(context);
695 enum signal_type signal_type = SIGNAL_TYPE_NONE; 915 int unpatched_disp_clk = context->bw.dce.dispclk_khz;
696 bool was_active = clk_dce->dfs_bypass_active; 916
697 int i; 917 /* W/A for dal3 linux */
698 918 context->bw.dce.dispclk_khz = context->bw.dce.dispclk_khz * 115 / 100;
699 /* Disable DFS bypass by default. */ 919
700 clk_dce->dfs_bypass_active = false; 920 if (should_set_clock(safe_to_lower, context->bw.dce.dispclk_khz, dccg->clks.dispclk_khz)) {
701 921 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
702 /* Check that DFS bypass is available. */ 922 clock_voltage_req.clocks_in_khz = context->bw.dce.dispclk_khz;
703 if (!clk_dce->dfs_bypass_enabled) 923 context->bw.dce.dispclk_khz = dce112_set_clock(dccg, context->bw.dce.dispclk_khz);
704 goto update; 924 dccg->clks.dispclk_khz = context->bw.dce.dispclk_khz;
705 925
706 /* Check if the requested display clock is below the threshold. */ 926 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
707 if (requested_clock_khz >= 400000) 927 }
708 goto update; 928
709 929 if (should_set_clock(safe_to_lower, max_pix_clk, dccg->clks.phyclk_khz)) {
710 /* DFS-bypass should only be enabled on single stream setups */ 930 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
711 if (context->stream_count != 1) 931 clock_voltage_req.clocks_in_khz = max_pix_clk;
712 goto update; 932 dccg->clks.phyclk_khz = max_pix_clk;
713 933
714 /* Check that the stream's signal type is an embedded panel */ 934 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
715 for (i = 0; i < dc->res_pool->pipe_count; i++) { 935 }
716 if (res_ctx->pipe_ctx[i].stream) { 936 dce11_pplib_apply_display_requirements(dccg->ctx->dc, context);
717 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i]; 937
718 938 context->bw.dce.dispclk_khz = unpatched_disp_clk;
719 signal_type = pipe_ctx->stream->sink->link->connector_signal;
720 break;
721 }
722 }
723
724 if (signal_type == SIGNAL_TYPE_EDP ||
725 signal_type == SIGNAL_TYPE_LVDS)
726 clk_dce->dfs_bypass_active = true;
727
728update:
729 /* Update the clock state. We don't need to respect safe_to_lower
730 * because DFS bypass should always be greater than the current
731 * display clock frequency.
732 */
733 if (was_active != clk_dce->dfs_bypass_active) {
734 dccg->clks.dispclk_khz =
735 dccg->funcs->set_dispclk(dccg, dccg->clks.dispclk_khz);
736 return true;
737 }
738
739 return false;
740} 939}
741 940
742#ifdef CONFIG_DRM_AMD_DC_DCN1_0 941#ifdef CONFIG_DRM_AMD_DC_DCN1_0
743static const struct display_clock_funcs dcn1_funcs = { 942static const struct dccg_funcs dcn1_funcs = {
744 .get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz, 943 .get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
745 .set_dispclk = dce112_set_clock,
746 .update_clocks = dcn1_update_clocks 944 .update_clocks = dcn1_update_clocks
747}; 945};
748#endif 946#endif
749 947
750static const struct display_clock_funcs dce120_funcs = { 948static const struct dccg_funcs dce120_funcs = {
751 .get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz, 949 .get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
752 .set_dispclk = dce112_set_clock,
753 .update_clocks = dce12_update_clocks 950 .update_clocks = dce12_update_clocks
754}; 951};
755 952
756static const struct display_clock_funcs dce112_funcs = { 953static const struct dccg_funcs dce112_funcs = {
757 .get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz, 954 .get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz,
758 .set_dispclk = dce112_set_clock, 955 .update_clocks = dce112_update_clocks
759 .update_clocks = dce_update_clocks
760}; 956};
761 957
762static const struct display_clock_funcs dce110_funcs = { 958static const struct dccg_funcs dce110_funcs = {
763 .get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz, 959 .get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz,
764 .set_dispclk = dce_psr_set_clock, 960 .update_clocks = dce11_update_clocks,
765 .update_clocks = dce_update_clocks,
766 .update_dfs_bypass = dce_update_dfs_bypass
767}; 961};
768 962
769static const struct display_clock_funcs dce_funcs = { 963static const struct dccg_funcs dce_funcs = {
770 .get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz, 964 .get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz,
771 .set_dispclk = dce_set_clock,
772 .update_clocks = dce_update_clocks 965 .update_clocks = dce_update_clocks
773}; 966};
774 967
775static void dce_dccg_construct( 968static void dce_dccg_construct(
776 struct dce_dccg *clk_dce, 969 struct dce_dccg *dccg_dce,
777 struct dc_context *ctx, 970 struct dc_context *ctx,
778 const struct dccg_registers *regs, 971 const struct dccg_registers *regs,
779 const struct dccg_shift *clk_shift, 972 const struct dccg_shift *clk_shift,
780 const struct dccg_mask *clk_mask) 973 const struct dccg_mask *clk_mask)
781{ 974{
782 struct dccg *base = &clk_dce->base; 975 struct dccg *base = &dccg_dce->base;
783 976
784 base->ctx = ctx; 977 base->ctx = ctx;
785 base->funcs = &dce_funcs; 978 base->funcs = &dce_funcs;
786 979
787 clk_dce->regs = regs; 980 dccg_dce->regs = regs;
788 clk_dce->clk_shift = clk_shift; 981 dccg_dce->dccg_shift = clk_shift;
789 clk_dce->clk_mask = clk_mask; 982 dccg_dce->dccg_mask = clk_mask;
790 983
791 clk_dce->dfs_bypass_disp_clk = 0; 984 dccg_dce->dfs_bypass_disp_clk = 0;
792 985
793 clk_dce->dprefclk_ss_percentage = 0; 986 dccg_dce->dprefclk_ss_percentage = 0;
794 clk_dce->dprefclk_ss_divider = 1000; 987 dccg_dce->dprefclk_ss_divider = 1000;
795 clk_dce->ss_on_dprefclk = false; 988 dccg_dce->ss_on_dprefclk = false;
796 989
797 base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL; 990 base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
798 base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID; 991 base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;
799 992
800 dce_clock_read_integrated_info(clk_dce); 993 dce_clock_read_integrated_info(dccg_dce);
801 dce_clock_read_ss_info(clk_dce); 994 dce_clock_read_ss_info(dccg_dce);
802} 995}
803 996
804struct dccg *dce_dccg_create( 997struct dccg *dce_dccg_create(
@@ -807,21 +1000,21 @@ struct dccg *dce_dccg_create(
807 const struct dccg_shift *clk_shift, 1000 const struct dccg_shift *clk_shift,
808 const struct dccg_mask *clk_mask) 1001 const struct dccg_mask *clk_mask)
809{ 1002{
810 struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL); 1003 struct dce_dccg *dccg_dce = kzalloc(sizeof(*dccg_dce), GFP_KERNEL);
811 1004
812 if (clk_dce == NULL) { 1005 if (dccg_dce == NULL) {
813 BREAK_TO_DEBUGGER(); 1006 BREAK_TO_DEBUGGER();
814 return NULL; 1007 return NULL;
815 } 1008 }
816 1009
817 memcpy(clk_dce->max_clks_by_state, 1010 memcpy(dccg_dce->max_clks_by_state,
818 dce80_max_clks_by_state, 1011 dce80_max_clks_by_state,
819 sizeof(dce80_max_clks_by_state)); 1012 sizeof(dce80_max_clks_by_state));
820 1013
821 dce_dccg_construct( 1014 dce_dccg_construct(
822 clk_dce, ctx, regs, clk_shift, clk_mask); 1015 dccg_dce, ctx, regs, clk_shift, clk_mask);
823 1016
824 return &clk_dce->base; 1017 return &dccg_dce->base;
825} 1018}
826 1019
827struct dccg *dce110_dccg_create( 1020struct dccg *dce110_dccg_create(
@@ -830,23 +1023,23 @@ struct dccg *dce110_dccg_create(
830 const struct dccg_shift *clk_shift, 1023 const struct dccg_shift *clk_shift,
831 const struct dccg_mask *clk_mask) 1024 const struct dccg_mask *clk_mask)
832{ 1025{
833 struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL); 1026 struct dce_dccg *dccg_dce = kzalloc(sizeof(*dccg_dce), GFP_KERNEL);
834 1027
835 if (clk_dce == NULL) { 1028 if (dccg_dce == NULL) {
836 BREAK_TO_DEBUGGER(); 1029 BREAK_TO_DEBUGGER();
837 return NULL; 1030 return NULL;
838 } 1031 }
839 1032
840 memcpy(clk_dce->max_clks_by_state, 1033 memcpy(dccg_dce->max_clks_by_state,
841 dce110_max_clks_by_state, 1034 dce110_max_clks_by_state,
842 sizeof(dce110_max_clks_by_state)); 1035 sizeof(dce110_max_clks_by_state));
843 1036
844 dce_dccg_construct( 1037 dce_dccg_construct(
845 clk_dce, ctx, regs, clk_shift, clk_mask); 1038 dccg_dce, ctx, regs, clk_shift, clk_mask);
846 1039
847 clk_dce->base.funcs = &dce110_funcs; 1040 dccg_dce->base.funcs = &dce110_funcs;
848 1041
849 return &clk_dce->base; 1042 return &dccg_dce->base;
850} 1043}
851 1044
852struct dccg *dce112_dccg_create( 1045struct dccg *dce112_dccg_create(
@@ -855,45 +1048,45 @@ struct dccg *dce112_dccg_create(
855 const struct dccg_shift *clk_shift, 1048 const struct dccg_shift *clk_shift,
856 const struct dccg_mask *clk_mask) 1049 const struct dccg_mask *clk_mask)
857{ 1050{
858 struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL); 1051 struct dce_dccg *dccg_dce = kzalloc(sizeof(*dccg_dce), GFP_KERNEL);
859 1052
860 if (clk_dce == NULL) { 1053 if (dccg_dce == NULL) {
861 BREAK_TO_DEBUGGER(); 1054 BREAK_TO_DEBUGGER();
862 return NULL; 1055 return NULL;
863 } 1056 }
864 1057
865 memcpy(clk_dce->max_clks_by_state, 1058 memcpy(dccg_dce->max_clks_by_state,
866 dce112_max_clks_by_state, 1059 dce112_max_clks_by_state,
867 sizeof(dce112_max_clks_by_state)); 1060 sizeof(dce112_max_clks_by_state));
868 1061
869 dce_dccg_construct( 1062 dce_dccg_construct(
870 clk_dce, ctx, regs, clk_shift, clk_mask); 1063 dccg_dce, ctx, regs, clk_shift, clk_mask);
871 1064
872 clk_dce->base.funcs = &dce112_funcs; 1065 dccg_dce->base.funcs = &dce112_funcs;
873 1066
874 return &clk_dce->base; 1067 return &dccg_dce->base;
875} 1068}
876 1069
877struct dccg *dce120_dccg_create(struct dc_context *ctx) 1070struct dccg *dce120_dccg_create(struct dc_context *ctx)
878{ 1071{
879 struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL); 1072 struct dce_dccg *dccg_dce = kzalloc(sizeof(*dccg_dce), GFP_KERNEL);
880 1073
881 if (clk_dce == NULL) { 1074 if (dccg_dce == NULL) {
882 BREAK_TO_DEBUGGER(); 1075 BREAK_TO_DEBUGGER();
883 return NULL; 1076 return NULL;
884 } 1077 }
885 1078
886 memcpy(clk_dce->max_clks_by_state, 1079 memcpy(dccg_dce->max_clks_by_state,
887 dce120_max_clks_by_state, 1080 dce120_max_clks_by_state,
888 sizeof(dce120_max_clks_by_state)); 1081 sizeof(dce120_max_clks_by_state));
889 1082
890 dce_dccg_construct( 1083 dce_dccg_construct(
891 clk_dce, ctx, NULL, NULL, NULL); 1084 dccg_dce, ctx, NULL, NULL, NULL);
892 1085
893 clk_dce->dprefclk_khz = 600000; 1086 dccg_dce->dprefclk_khz = 600000;
894 clk_dce->base.funcs = &dce120_funcs; 1087 dccg_dce->base.funcs = &dce120_funcs;
895 1088
896 return &clk_dce->base; 1089 return &dccg_dce->base;
897} 1090}
898 1091
899#ifdef CONFIG_DRM_AMD_DC_DCN1_0 1092#ifdef CONFIG_DRM_AMD_DC_DCN1_0
@@ -902,46 +1095,46 @@ struct dccg *dcn1_dccg_create(struct dc_context *ctx)
902 struct dc_debug_options *debug = &ctx->dc->debug; 1095 struct dc_debug_options *debug = &ctx->dc->debug;
903 struct dc_bios *bp = ctx->dc_bios; 1096 struct dc_bios *bp = ctx->dc_bios;
904 struct dc_firmware_info fw_info = { { 0 } }; 1097 struct dc_firmware_info fw_info = { { 0 } };
905 struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL); 1098 struct dce_dccg *dccg_dce = kzalloc(sizeof(*dccg_dce), GFP_KERNEL);
906 1099
907 if (clk_dce == NULL) { 1100 if (dccg_dce == NULL) {
908 BREAK_TO_DEBUGGER(); 1101 BREAK_TO_DEBUGGER();
909 return NULL; 1102 return NULL;
910 } 1103 }
911 1104
912 clk_dce->base.ctx = ctx; 1105 dccg_dce->base.ctx = ctx;
913 clk_dce->base.funcs = &dcn1_funcs; 1106 dccg_dce->base.funcs = &dcn1_funcs;
914 1107
915 clk_dce->dfs_bypass_disp_clk = 0; 1108 dccg_dce->dfs_bypass_disp_clk = 0;
916 1109
917 clk_dce->dprefclk_ss_percentage = 0; 1110 dccg_dce->dprefclk_ss_percentage = 0;
918 clk_dce->dprefclk_ss_divider = 1000; 1111 dccg_dce->dprefclk_ss_divider = 1000;
919 clk_dce->ss_on_dprefclk = false; 1112 dccg_dce->ss_on_dprefclk = false;
920 1113
921 clk_dce->dprefclk_khz = 600000; 1114 dccg_dce->dprefclk_khz = 600000;
922 if (bp->integrated_info) 1115 if (bp->integrated_info)
923 clk_dce->dentist_vco_freq_khz = bp->integrated_info->dentist_vco_freq; 1116 dccg_dce->dentist_vco_freq_khz = bp->integrated_info->dentist_vco_freq;
924 if (clk_dce->dentist_vco_freq_khz == 0) { 1117 if (dccg_dce->dentist_vco_freq_khz == 0) {
925 bp->funcs->get_firmware_info(bp, &fw_info); 1118 bp->funcs->get_firmware_info(bp, &fw_info);
926 clk_dce->dentist_vco_freq_khz = fw_info.smu_gpu_pll_output_freq; 1119 dccg_dce->dentist_vco_freq_khz = fw_info.smu_gpu_pll_output_freq;
927 if (clk_dce->dentist_vco_freq_khz == 0) 1120 if (dccg_dce->dentist_vco_freq_khz == 0)
928 clk_dce->dentist_vco_freq_khz = 3600000; 1121 dccg_dce->dentist_vco_freq_khz = 3600000;
929 } 1122 }
930 1123
931 if (!debug->disable_dfs_bypass && bp->integrated_info) 1124 if (!debug->disable_dfs_bypass && bp->integrated_info)
932 if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE) 1125 if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
933 clk_dce->dfs_bypass_enabled = true; 1126 dccg_dce->dfs_bypass_enabled = true;
934 1127
935 dce_clock_read_ss_info(clk_dce); 1128 dce_clock_read_ss_info(dccg_dce);
936 1129
937 return &clk_dce->base; 1130 return &dccg_dce->base;
938} 1131}
939#endif 1132#endif
940 1133
941void dce_dccg_destroy(struct dccg **dccg) 1134void dce_dccg_destroy(struct dccg **dccg)
942{ 1135{
943 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(*dccg); 1136 struct dce_dccg *dccg_dce = TO_DCE_DCCG(*dccg);
944 1137
945 kfree(clk_dce); 1138 kfree(dccg_dce);
946 *dccg = NULL; 1139 *dccg = NULL;
947} 1140}
diff --git a/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.h b/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.h
index 34fdb386c884..b9ac5776ce76 100644
--- a/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.h
+++ b/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.h
@@ -29,6 +29,8 @@
29 29
30#include "display_clock.h" 30#include "display_clock.h"
31 31
32#define MEMORY_TYPE_MULTIPLIER_CZ 4
33
32#define CLK_COMMON_REG_LIST_DCE_BASE() \ 34#define CLK_COMMON_REG_LIST_DCE_BASE() \
33 .DPREFCLK_CNTL = mmDPREFCLK_CNTL, \ 35 .DPREFCLK_CNTL = mmDPREFCLK_CNTL, \
34 .DENTIST_DISPCLK_CNTL = mmDENTIST_DISPCLK_CNTL 36 .DENTIST_DISPCLK_CNTL = mmDENTIST_DISPCLK_CNTL
@@ -69,8 +71,8 @@ struct dccg_registers {
69struct dce_dccg { 71struct dce_dccg {
70 struct dccg base; 72 struct dccg base;
71 const struct dccg_registers *regs; 73 const struct dccg_registers *regs;
72 const struct dccg_shift *clk_shift; 74 const struct dccg_shift *dccg_shift;
73 const struct dccg_mask *clk_mask; 75 const struct dccg_mask *dccg_mask;
74 76
75 struct state_dependent_clocks max_clks_by_state[DM_PP_CLOCKS_MAX_STATES]; 77 struct state_dependent_clocks max_clks_by_state[DM_PP_CLOCKS_MAX_STATES];
76 78
diff --git a/drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c b/drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c
index 74c05e878807..2725eac4baab 100644
--- a/drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c
+++ b/drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c
@@ -105,74 +105,24 @@ bool dce100_enable_display_power_gating(
105 return false; 105 return false;
106} 106}
107 107
108static void dce100_pplib_apply_display_requirements(
109 struct dc *dc,
110 struct dc_state *context)
111{
112 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
113
114 pp_display_cfg->avail_mclk_switch_time_us =
115 dce110_get_min_vblank_time_us(context);
116 /*pp_display_cfg->min_memory_clock_khz = context->bw.dce.yclk_khz
117 / MEMORY_TYPE_MULTIPLIER;*/
118
119 dce110_fill_display_configs(context, pp_display_cfg);
120
121 if (memcmp(&dc->prev_display_config, pp_display_cfg, sizeof(
122 struct dm_pp_display_configuration)) != 0)
123 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
124
125 dc->prev_display_config = *pp_display_cfg;
126}
127
128/* unit: in_khz before mode set, get pixel clock from context. ASIC register
129 * may not be programmed yet
130 */
131static uint32_t get_max_pixel_clock_for_all_paths(
132 struct dc *dc,
133 struct dc_state *context)
134{
135 uint32_t max_pix_clk = 0;
136 int i;
137
138 for (i = 0; i < MAX_PIPES; i++) {
139 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
140
141 if (pipe_ctx->stream == NULL)
142 continue;
143
144 /* do not check under lay */
145 if (pipe_ctx->top_pipe)
146 continue;
147
148 if (pipe_ctx->stream_res.pix_clk_params.requested_pix_clk > max_pix_clk)
149 max_pix_clk =
150 pipe_ctx->stream_res.pix_clk_params.requested_pix_clk;
151 }
152 return max_pix_clk;
153}
154
155void dce100_set_bandwidth( 108void dce100_set_bandwidth(
156 struct dc *dc, 109 struct dc *dc,
157 struct dc_state *context, 110 struct dc_state *context,
158 bool decrease_allowed) 111 bool decrease_allowed)
159{ 112{
160 struct dc_clocks req_clks; 113 int dispclk_khz = context->bw.dce.dispclk_khz;
161 114
162 req_clks.dispclk_khz = context->bw.dce.dispclk_khz * 115 / 100; 115 context->bw.dce.dispclk_khz = context->bw.dce.dispclk_khz * 115 / 100;
163 req_clks.phyclk_khz = get_max_pixel_clock_for_all_paths(dc, context);
164 116
165 dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool); 117 dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool);
166 118
167 dc->res_pool->dccg->funcs->update_clocks( 119 dc->res_pool->dccg->funcs->update_clocks(
168 dc->res_pool->dccg, 120 dc->res_pool->dccg,
169 &req_clks, 121 context,
170 decrease_allowed); 122 decrease_allowed);
171 123 context->bw.dce.dispclk_khz = dispclk_khz;
172 dce100_pplib_apply_display_requirements(dc, context);
173} 124}
174 125
175
176/**************************************************************************/ 126/**************************************************************************/
177 127
178void dce100_hw_sequencer_construct(struct dc *dc) 128void dce100_hw_sequencer_construct(struct dc *dc)
@@ -181,7 +131,5 @@ void dce100_hw_sequencer_construct(struct dc *dc)
181 131
182 dc->hwss.enable_display_power_gating = dce100_enable_display_power_gating; 132 dc->hwss.enable_display_power_gating = dce100_enable_display_power_gating;
183 dc->hwss.set_bandwidth = dce100_set_bandwidth; 133 dc->hwss.set_bandwidth = dce100_set_bandwidth;
184 dc->hwss.pplib_apply_display_requirements =
185 dce100_pplib_apply_display_requirements;
186} 134}
187 135
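As a reading aid, a condensed sketch of the reworked dce100_set_bandwidth() flow from the hunk above, with stand-in types: the 15% dispclk headroom is applied on the context, the whole state is handed to update_clocks() (the only remaining clock call in this path), and the requested value is restored afterwards so the context is left as validated.

#include <stdbool.h>

/* Stand-in types; the real ones are struct dc_state and struct dccg. */
struct dc_state_sketch { int dispclk_khz; };

struct dccg_sketch {
        void (*update_clocks)(struct dccg_sketch *dccg,
                              struct dc_state_sketch *context,
                              bool safe_to_lower);
};

static void dce100_set_bandwidth_sketch(struct dccg_sketch *dccg,
                                        struct dc_state_sketch *context,
                                        bool decrease_allowed)
{
        int dispclk_khz = context->dispclk_khz;         /* remember request */

        /* Program with 15% headroom, then restore the requested value. */
        context->dispclk_khz = context->dispclk_khz * 115 / 100;
        dccg->update_clocks(dccg, context, decrease_allowed);
        context->dispclk_khz = dispclk_khz;
}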
diff --git a/drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c b/drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c
index 14754a87156c..ae7000480525 100644
--- a/drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c
+++ b/drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c
@@ -22,6 +22,7 @@
22 * Authors: AMD 22 * Authors: AMD
23 * 23 *
24 */ 24 */
25#include "../dce/dce_clocks.h"
25#include "dm_services.h" 26#include "dm_services.h"
26 27
27#include "link_encoder.h" 28#include "link_encoder.h"
@@ -40,7 +41,6 @@
40#include "dce/dce_ipp.h" 41#include "dce/dce_ipp.h"
41#include "dce/dce_transform.h" 42#include "dce/dce_transform.h"
42#include "dce/dce_opp.h" 43#include "dce/dce_opp.h"
43#include "dce/dce_clocks.h"
44#include "dce/dce_clock_source.h" 44#include "dce/dce_clock_source.h"
45#include "dce/dce_audio.h" 45#include "dce/dce_audio.h"
46#include "dce/dce_hwseq.h" 46#include "dce/dce_hwseq.h"
@@ -767,7 +767,7 @@ bool dce100_validate_bandwidth(
767 if (at_least_one_pipe) { 767 if (at_least_one_pipe) {
768 /* TODO implement when needed but for now hardcode max value*/ 768 /* TODO implement when needed but for now hardcode max value*/
769 context->bw.dce.dispclk_khz = 681000; 769 context->bw.dce.dispclk_khz = 681000;
770 context->bw.dce.yclk_khz = 250000 * MEMORY_TYPE_MULTIPLIER; 770 context->bw.dce.yclk_khz = 250000 * MEMORY_TYPE_MULTIPLIER_CZ;
771 } else { 771 } else {
772 context->bw.dce.dispclk_khz = 0; 772 context->bw.dce.dispclk_khz = 0;
773 context->bw.dce.yclk_khz = 0; 773 context->bw.dce.yclk_khz = 0;
diff --git a/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c b/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c
index b75ede5f84f7..0d25dcf6408a 100644
--- a/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c
+++ b/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c
@@ -1192,8 +1192,8 @@ static void build_audio_output(
1192 if (pipe_ctx->stream->signal == SIGNAL_TYPE_DISPLAY_PORT || 1192 if (pipe_ctx->stream->signal == SIGNAL_TYPE_DISPLAY_PORT ||
1193 pipe_ctx->stream->signal == SIGNAL_TYPE_DISPLAY_PORT_MST) { 1193 pipe_ctx->stream->signal == SIGNAL_TYPE_DISPLAY_PORT_MST) {
1194 audio_output->pll_info.dp_dto_source_clock_in_khz = 1194 audio_output->pll_info.dp_dto_source_clock_in_khz =
1195 state->dis_clk->funcs->get_dp_ref_clk_frequency( 1195 state->dccg->funcs->get_dp_ref_clk_frequency(
1196 state->dis_clk); 1196 state->dccg);
1197 } 1197 }
1198 1198
1199 audio_output->pll_info.feed_back_divider = 1199 audio_output->pll_info.feed_back_divider =
@@ -1743,34 +1743,6 @@ static void set_static_screen_control(struct pipe_ctx **pipe_ctx,
1743 set_static_screen_control(pipe_ctx[i]->stream_res.tg, value); 1743 set_static_screen_control(pipe_ctx[i]->stream_res.tg, value);
1744} 1744}
1745 1745
1746/* unit: in_khz before mode set, get pixel clock from context. ASIC register
1747 * may not be programmed yet
1748 */
1749static uint32_t get_max_pixel_clock_for_all_paths(
1750 struct dc *dc,
1751 struct dc_state *context)
1752{
1753 uint32_t max_pix_clk = 0;
1754 int i;
1755
1756 for (i = 0; i < MAX_PIPES; i++) {
1757 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1758
1759 if (pipe_ctx->stream == NULL)
1760 continue;
1761
1762 /* do not check under lay */
1763 if (pipe_ctx->top_pipe)
1764 continue;
1765
1766 if (pipe_ctx->stream_res.pix_clk_params.requested_pix_clk > max_pix_clk)
1767 max_pix_clk =
1768 pipe_ctx->stream_res.pix_clk_params.requested_pix_clk;
1769 }
1770
1771 return max_pix_clk;
1772}
1773
1774/* 1746/*
1775 * Check if FBC can be enabled 1747 * Check if FBC can be enabled
1776 */ 1748 */
@@ -2380,191 +2352,22 @@ static void init_hw(struct dc *dc)
2380 2352
2381} 2353}
2382 2354
2383void dce110_fill_display_configs( 2355void dce110_set_bandwidth(
2384 const struct dc_state *context,
2385 struct dm_pp_display_configuration *pp_display_cfg)
2386{
2387 int j;
2388 int num_cfgs = 0;
2389
2390 for (j = 0; j < context->stream_count; j++) {
2391 int k;
2392
2393 const struct dc_stream_state *stream = context->streams[j];
2394 struct dm_pp_single_disp_config *cfg =
2395 &pp_display_cfg->disp_configs[num_cfgs];
2396 const struct pipe_ctx *pipe_ctx = NULL;
2397
2398 for (k = 0; k < MAX_PIPES; k++)
2399 if (stream == context->res_ctx.pipe_ctx[k].stream) {
2400 pipe_ctx = &context->res_ctx.pipe_ctx[k];
2401 break;
2402 }
2403
2404 ASSERT(pipe_ctx != NULL);
2405
2406 /* only notify active stream */
2407 if (stream->dpms_off)
2408 continue;
2409
2410 num_cfgs++;
2411 cfg->signal = pipe_ctx->stream->signal;
2412 cfg->pipe_idx = pipe_ctx->stream_res.tg->inst;
2413 cfg->src_height = stream->src.height;
2414 cfg->src_width = stream->src.width;
2415 cfg->ddi_channel_mapping =
2416 stream->sink->link->ddi_channel_mapping.raw;
2417 cfg->transmitter =
2418 stream->sink->link->link_enc->transmitter;
2419 cfg->link_settings.lane_count =
2420 stream->sink->link->cur_link_settings.lane_count;
2421 cfg->link_settings.link_rate =
2422 stream->sink->link->cur_link_settings.link_rate;
2423 cfg->link_settings.link_spread =
2424 stream->sink->link->cur_link_settings.link_spread;
2425 cfg->sym_clock = stream->phy_pix_clk;
2426 /* Round v_refresh*/
2427 cfg->v_refresh = stream->timing.pix_clk_khz * 1000;
2428 cfg->v_refresh /= stream->timing.h_total;
2429 cfg->v_refresh = (cfg->v_refresh + stream->timing.v_total / 2)
2430 / stream->timing.v_total;
2431 }
2432
2433 pp_display_cfg->display_count = num_cfgs;
2434}
2435
2436uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context)
2437{
2438 uint8_t j;
2439 uint32_t min_vertical_blank_time = -1;
2440
2441 for (j = 0; j < context->stream_count; j++) {
2442 struct dc_stream_state *stream = context->streams[j];
2443 uint32_t vertical_blank_in_pixels = 0;
2444 uint32_t vertical_blank_time = 0;
2445
2446 vertical_blank_in_pixels = stream->timing.h_total *
2447 (stream->timing.v_total
2448 - stream->timing.v_addressable);
2449
2450 vertical_blank_time = vertical_blank_in_pixels
2451 * 1000 / stream->timing.pix_clk_khz;
2452
2453 if (min_vertical_blank_time > vertical_blank_time)
2454 min_vertical_blank_time = vertical_blank_time;
2455 }
2456
2457 return min_vertical_blank_time;
2458}
2459
2460static int determine_sclk_from_bounding_box(
2461 const struct dc *dc,
2462 int required_sclk)
2463{
2464 int i;
2465
2466 /*
2467 * Some asics do not give us sclk levels, so we just report the actual
2468 * required sclk
2469 */
2470 if (dc->sclk_lvls.num_levels == 0)
2471 return required_sclk;
2472
2473 for (i = 0; i < dc->sclk_lvls.num_levels; i++) {
2474 if (dc->sclk_lvls.clocks_in_khz[i] >= required_sclk)
2475 return dc->sclk_lvls.clocks_in_khz[i];
2476 }
2477 /*
2478 * even maximum level could not satisfy requirement, this
2479 * is unexpected at this stage, should have been caught at
2480 * validation time
2481 */
2482 ASSERT(0);
2483 return dc->sclk_lvls.clocks_in_khz[dc->sclk_lvls.num_levels - 1];
2484}
2485
2486static void pplib_apply_display_requirements(
2487 struct dc *dc,
2488 struct dc_state *context)
2489{
2490 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
2491
2492 pp_display_cfg->all_displays_in_sync =
2493 context->bw.dce.all_displays_in_sync;
2494 pp_display_cfg->nb_pstate_switch_disable =
2495 context->bw.dce.nbp_state_change_enable == false;
2496 pp_display_cfg->cpu_cc6_disable =
2497 context->bw.dce.cpuc_state_change_enable == false;
2498 pp_display_cfg->cpu_pstate_disable =
2499 context->bw.dce.cpup_state_change_enable == false;
2500 pp_display_cfg->cpu_pstate_separation_time =
2501 context->bw.dce.blackout_recovery_time_us;
2502
2503 pp_display_cfg->min_memory_clock_khz = context->bw.dce.yclk_khz
2504 / MEMORY_TYPE_MULTIPLIER;
2505
2506 pp_display_cfg->min_engine_clock_khz = determine_sclk_from_bounding_box(
2507 dc,
2508 context->bw.dce.sclk_khz);
2509
2510 pp_display_cfg->min_engine_clock_deep_sleep_khz
2511 = context->bw.dce.sclk_deep_sleep_khz;
2512
2513 pp_display_cfg->avail_mclk_switch_time_us =
2514 dce110_get_min_vblank_time_us(context);
2515 /* TODO: dce11.2*/
2516 pp_display_cfg->avail_mclk_switch_time_in_disp_active_us = 0;
2517
2518 pp_display_cfg->disp_clk_khz = dc->res_pool->dccg->clks.dispclk_khz;
2519
2520 dce110_fill_display_configs(context, pp_display_cfg);
2521
2522 /* TODO: is this still applicable?*/
2523 if (pp_display_cfg->display_count == 1) {
2524 const struct dc_crtc_timing *timing =
2525 &context->streams[0]->timing;
2526
2527 pp_display_cfg->crtc_index =
2528 pp_display_cfg->disp_configs[0].pipe_idx;
2529 pp_display_cfg->line_time_in_us = timing->h_total * 1000
2530 / timing->pix_clk_khz;
2531 }
2532
2533 if (memcmp(&dc->prev_display_config, pp_display_cfg, sizeof(
2534 struct dm_pp_display_configuration)) != 0)
2535 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
2536
2537 dc->prev_display_config = *pp_display_cfg;
2538}
2539
2540static void dce110_set_bandwidth(
2541 struct dc *dc, 2356 struct dc *dc,
2542 struct dc_state *context, 2357 struct dc_state *context,
2543 bool decrease_allowed) 2358 bool decrease_allowed)
2544{ 2359{
2545 struct dc_clocks req_clks;
2546 struct dccg *dccg = dc->res_pool->dccg; 2360 struct dccg *dccg = dc->res_pool->dccg;
2547 2361
2548 req_clks.dispclk_khz = context->bw.dce.dispclk_khz;
2549 req_clks.phyclk_khz = get_max_pixel_clock_for_all_paths(dc, context);
2550
2551 if (decrease_allowed) 2362 if (decrease_allowed)
2552 dce110_set_displaymarks(dc, context); 2363 dce110_set_displaymarks(dc, context);
2553 else 2364 else
2554 dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool); 2365 dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool);
2555 2366
2556 if (dccg->funcs->update_dfs_bypass)
2557 dccg->funcs->update_dfs_bypass(
2558 dccg,
2559 dc,
2560 context,
2561 req_clks.dispclk_khz);
2562
2563 dccg->funcs->update_clocks( 2367 dccg->funcs->update_clocks(
2564 dccg, 2368 dccg,
2565 &req_clks, 2369 context,
2566 decrease_allowed); 2370 decrease_allowed);
2567 pplib_apply_display_requirements(dc, context);
2568} 2371}
2569 2372
2570static void dce110_program_front_end_for_pipe( 2373static void dce110_program_front_end_for_pipe(
@@ -2839,10 +2642,6 @@ void dce110_set_cursor_attribute(struct pipe_ctx *pipe_ctx)
2839 pipe_ctx->plane_res.xfm, attributes); 2642 pipe_ctx->plane_res.xfm, attributes);
2840} 2643}
2841 2644
2842static void ready_shared_resources(struct dc *dc, struct dc_state *context) {}
2843
2844static void optimize_shared_resources(struct dc *dc) {}
2845
2846static const struct hw_sequencer_funcs dce110_funcs = { 2645static const struct hw_sequencer_funcs dce110_funcs = {
2847 .program_gamut_remap = program_gamut_remap, 2646 .program_gamut_remap = program_gamut_remap,
2848 .program_csc_matrix = program_csc_matrix, 2647 .program_csc_matrix = program_csc_matrix,
@@ -2877,9 +2676,6 @@ static const struct hw_sequencer_funcs dce110_funcs = {
2877 .setup_stereo = NULL, 2676 .setup_stereo = NULL,
2878 .set_avmute = dce110_set_avmute, 2677 .set_avmute = dce110_set_avmute,
2879 .wait_for_mpcc_disconnect = dce110_wait_for_mpcc_disconnect, 2678 .wait_for_mpcc_disconnect = dce110_wait_for_mpcc_disconnect,
2880 .ready_shared_resources = ready_shared_resources,
2881 .optimize_shared_resources = optimize_shared_resources,
2882 .pplib_apply_display_requirements = pplib_apply_display_requirements,
2883 .edp_backlight_control = hwss_edp_backlight_control, 2679 .edp_backlight_control = hwss_edp_backlight_control,
2884 .edp_power_control = hwss_edp_power_control, 2680 .edp_power_control = hwss_edp_power_control,
2885 .edp_wait_for_hpd_ready = hwss_edp_wait_for_hpd_ready, 2681 .edp_wait_for_hpd_ready = hwss_edp_wait_for_hpd_ready,
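The dce110 hunk above removes dce110_get_min_vblank_time_us() and the pplib plumbing around it from the hw sequencer; for reference, a self-contained sketch of the arithmetic that helper performs, with a stand-in timing struct:

/* Stand-in for the relevant dc_crtc_timing fields. */
struct timing_sketch {
        unsigned int h_total;
        unsigned int v_total;
        unsigned int v_addressable;
        unsigned int pix_clk_khz;
};

/* Blank lines times h_total gives blank pixels; dividing by the pixel
 * clock in kHz and scaling by 1000 yields the vertical blank in us. */
static unsigned int vblank_time_us_sketch(const struct timing_sketch *t)
{
        unsigned int vblank_pixels =
                t->h_total * (t->v_total - t->v_addressable);

        return vblank_pixels * 1000 / t->pix_clk_khz;
}

For a 1080p60 CEA timing (h_total 2200, v_total 1125, v_addressable 1080, 148500 kHz pixel clock) this yields roughly 666 us of vblank, the per-stream value the removed code fed into avail_mclk_switch_time_us.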
diff --git a/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h b/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h
index d6db3dbd9015..c5e04f856e2c 100644
--- a/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h
+++ b/drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h
@@ -40,7 +40,6 @@ enum dc_status dce110_apply_ctx_to_hw(
40 struct dc_state *context); 40 struct dc_state *context);
41 41
42 42
43
44void dce110_enable_stream(struct pipe_ctx *pipe_ctx); 43void dce110_enable_stream(struct pipe_ctx *pipe_ctx);
45 44
46void dce110_disable_stream(struct pipe_ctx *pipe_ctx, int option); 45void dce110_disable_stream(struct pipe_ctx *pipe_ctx, int option);
@@ -64,11 +63,10 @@ void dce110_set_safe_displaymarks(
64 struct resource_context *res_ctx, 63 struct resource_context *res_ctx,
65 const struct resource_pool *pool); 64 const struct resource_pool *pool);
66 65
67void dce110_fill_display_configs( 66void dce110_set_bandwidth(
68 const struct dc_state *context, 67 struct dc *dc,
69 struct dm_pp_display_configuration *pp_display_cfg); 68 struct dc_state *context,
70 69 bool decrease_allowed);
71uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context);
72 70
73void dp_receiver_power_ctrl(struct dc_link *link, bool on); 71void dp_receiver_power_ctrl(struct dc_link *link, bool on);
74 72
diff --git a/drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c b/drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c
index e3624ca24574..2b031d136537 100644
--- a/drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c
+++ b/drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c
@@ -31,6 +31,7 @@
31#include "resource.h" 31#include "resource.h"
32#include "dce110/dce110_resource.h" 32#include "dce110/dce110_resource.h"
33 33
34#include "../dce/dce_clocks.h"
34#include "include/irq_service_interface.h" 35#include "include/irq_service_interface.h"
35#include "dce/dce_audio.h" 36#include "dce/dce_audio.h"
36#include "dce110/dce110_timing_generator.h" 37#include "dce110/dce110_timing_generator.h"
@@ -45,7 +46,6 @@
45#include "dce110/dce110_transform_v.h" 46#include "dce110/dce110_transform_v.h"
46#include "dce/dce_opp.h" 47#include "dce/dce_opp.h"
47#include "dce110/dce110_opp_v.h" 48#include "dce110/dce110_opp_v.h"
48#include "dce/dce_clocks.h"
49#include "dce/dce_clock_source.h" 49#include "dce/dce_clock_source.h"
50#include "dce/dce_hwseq.h" 50#include "dce/dce_hwseq.h"
51#include "dce110/dce110_hw_sequencer.h" 51#include "dce110/dce110_hw_sequencer.h"
@@ -1173,12 +1173,12 @@ static void bw_calcs_data_update_from_pplib(struct dc *dc)
1173 &clks); 1173 &clks);
1174 1174
1175 dc->bw_vbios->low_yclk = bw_frc_to_fixed( 1175 dc->bw_vbios->low_yclk = bw_frc_to_fixed(
1176 clks.clocks_in_khz[0] * MEMORY_TYPE_MULTIPLIER, 1000); 1176 clks.clocks_in_khz[0] * MEMORY_TYPE_MULTIPLIER_CZ, 1000);
1177 dc->bw_vbios->mid_yclk = bw_frc_to_fixed( 1177 dc->bw_vbios->mid_yclk = bw_frc_to_fixed(
1178 clks.clocks_in_khz[clks.num_levels>>1] * MEMORY_TYPE_MULTIPLIER, 1178 clks.clocks_in_khz[clks.num_levels>>1] * MEMORY_TYPE_MULTIPLIER_CZ,
1179 1000); 1179 1000);
1180 dc->bw_vbios->high_yclk = bw_frc_to_fixed( 1180 dc->bw_vbios->high_yclk = bw_frc_to_fixed(
1181 clks.clocks_in_khz[clks.num_levels-1] * MEMORY_TYPE_MULTIPLIER, 1181 clks.clocks_in_khz[clks.num_levels-1] * MEMORY_TYPE_MULTIPLIER_CZ,
1182 1000); 1182 1000);
1183} 1183}
1184 1184
diff --git a/drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c b/drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c
index 3ce79c208ddf..c7e2189429d9 100644
--- a/drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c
+++ b/drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c
@@ -23,6 +23,7 @@
23 * 23 *
24 */ 24 */
25 25
26#include "../dce/dce_clocks.h"
26#include "dm_services.h" 27#include "dm_services.h"
27 28
28#include "link_encoder.h" 29#include "link_encoder.h"
@@ -42,7 +43,6 @@
42#include "dce/dce_audio.h" 43#include "dce/dce_audio.h"
43#include "dce/dce_opp.h" 44#include "dce/dce_opp.h"
44#include "dce/dce_ipp.h" 45#include "dce/dce_ipp.h"
45#include "dce/dce_clocks.h"
46#include "dce/dce_clock_source.h" 46#include "dce/dce_clock_source.h"
47 47
48#include "dce/dce_hwseq.h" 48#include "dce/dce_hwseq.h"
@@ -1015,12 +1015,12 @@ static void bw_calcs_data_update_from_pplib(struct dc *dc)
1015 &clks); 1015 &clks);
1016 1016
1017 dc->bw_vbios->low_yclk = bw_frc_to_fixed( 1017 dc->bw_vbios->low_yclk = bw_frc_to_fixed(
1018 clks.clocks_in_khz[0] * MEMORY_TYPE_MULTIPLIER, 1000); 1018 clks.clocks_in_khz[0] * MEMORY_TYPE_MULTIPLIER_CZ, 1000);
1019 dc->bw_vbios->mid_yclk = bw_frc_to_fixed( 1019 dc->bw_vbios->mid_yclk = bw_frc_to_fixed(
1020 clks.clocks_in_khz[clks.num_levels>>1] * MEMORY_TYPE_MULTIPLIER, 1020 clks.clocks_in_khz[clks.num_levels>>1] * MEMORY_TYPE_MULTIPLIER_CZ,
1021 1000); 1021 1000);
1022 dc->bw_vbios->high_yclk = bw_frc_to_fixed( 1022 dc->bw_vbios->high_yclk = bw_frc_to_fixed(
1023 clks.clocks_in_khz[clks.num_levels-1] * MEMORY_TYPE_MULTIPLIER, 1023 clks.clocks_in_khz[clks.num_levels-1] * MEMORY_TYPE_MULTIPLIER_CZ,
1024 1000); 1024 1000);
1025 1025
1026 return; 1026 return;
@@ -1056,12 +1056,12 @@ static void bw_calcs_data_update_from_pplib(struct dc *dc)
1056 * YCLK = UMACLK*m_memoryTypeMultiplier 1056 * YCLK = UMACLK*m_memoryTypeMultiplier
1057 */ 1057 */
1058 dc->bw_vbios->low_yclk = bw_frc_to_fixed( 1058 dc->bw_vbios->low_yclk = bw_frc_to_fixed(
1059 mem_clks.data[0].clocks_in_khz * MEMORY_TYPE_MULTIPLIER, 1000); 1059 mem_clks.data[0].clocks_in_khz * MEMORY_TYPE_MULTIPLIER_CZ, 1000);
1060 dc->bw_vbios->mid_yclk = bw_frc_to_fixed( 1060 dc->bw_vbios->mid_yclk = bw_frc_to_fixed(
1061 mem_clks.data[mem_clks.num_levels>>1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER, 1061 mem_clks.data[mem_clks.num_levels>>1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER_CZ,
1062 1000); 1062 1000);
1063 dc->bw_vbios->high_yclk = bw_frc_to_fixed( 1063 dc->bw_vbios->high_yclk = bw_frc_to_fixed(
1064 mem_clks.data[mem_clks.num_levels-1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER, 1064 mem_clks.data[mem_clks.num_levels-1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER_CZ,
1065 1000); 1065 1000);
1066 1066
1067 /* Now notify PPLib/SMU about which Watermarks sets they should select 1067 /* Now notify PPLib/SMU about which Watermarks sets they should select
diff --git a/drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c b/drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c
index 79ab5f9f9115..da2d50d2d720 100644
--- a/drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c
+++ b/drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c
@@ -31,6 +31,8 @@
31#include "resource.h" 31#include "resource.h"
32#include "include/irq_service_interface.h" 32#include "include/irq_service_interface.h"
33#include "dce120_resource.h" 33#include "dce120_resource.h"
34
35#include "../dce/dce_clocks.h"
34#include "dce112/dce112_resource.h" 36#include "dce112/dce112_resource.h"
35 37
36#include "dce110/dce110_resource.h" 38#include "dce110/dce110_resource.h"
@@ -39,7 +41,6 @@
39#include "irq/dce120/irq_service_dce120.h" 41#include "irq/dce120/irq_service_dce120.h"
40#include "dce/dce_opp.h" 42#include "dce/dce_opp.h"
41#include "dce/dce_clock_source.h" 43#include "dce/dce_clock_source.h"
42#include "dce/dce_clocks.h"
43#include "dce/dce_ipp.h" 44#include "dce/dce_ipp.h"
44#include "dce/dce_mem_input.h" 45#include "dce/dce_mem_input.h"
45 46
@@ -834,12 +835,12 @@ static void bw_calcs_data_update_from_pplib(struct dc *dc)
834 * YCLK = UMACLK*m_memoryTypeMultiplier 835 * YCLK = UMACLK*m_memoryTypeMultiplier
835 */ 836 */
836 dc->bw_vbios->low_yclk = bw_frc_to_fixed( 837 dc->bw_vbios->low_yclk = bw_frc_to_fixed(
837 mem_clks.data[0].clocks_in_khz * MEMORY_TYPE_MULTIPLIER, 1000); 838 mem_clks.data[0].clocks_in_khz * MEMORY_TYPE_MULTIPLIER_CZ, 1000);
838 dc->bw_vbios->mid_yclk = bw_frc_to_fixed( 839 dc->bw_vbios->mid_yclk = bw_frc_to_fixed(
839 mem_clks.data[mem_clks.num_levels>>1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER, 840 mem_clks.data[mem_clks.num_levels>>1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER_CZ,
840 1000); 841 1000);
841 dc->bw_vbios->high_yclk = bw_frc_to_fixed( 842 dc->bw_vbios->high_yclk = bw_frc_to_fixed(
842 mem_clks.data[mem_clks.num_levels-1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER, 843 mem_clks.data[mem_clks.num_levels-1].clocks_in_khz * MEMORY_TYPE_MULTIPLIER_CZ,
843 1000); 844 1000);
844 845
845 /* Now notify PPLib/SMU about which Watermarks sets they should select 846 /* Now notify PPLib/SMU about which Watermarks sets they should select
diff --git a/drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c b/drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c
index d68f951f9869..76f58c6a6130 100644
--- a/drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c
+++ b/drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c
@@ -23,6 +23,7 @@
23 * 23 *
24 */ 24 */
25 25
26#include "../dce/dce_clocks.h"
26#include "dce/dce_8_0_d.h" 27#include "dce/dce_8_0_d.h"
27#include "dce/dce_8_0_sh_mask.h" 28#include "dce/dce_8_0_sh_mask.h"
28 29
@@ -44,7 +45,6 @@
44#include "dce/dce_ipp.h" 45#include "dce/dce_ipp.h"
45#include "dce/dce_transform.h" 46#include "dce/dce_transform.h"
46#include "dce/dce_opp.h" 47#include "dce/dce_opp.h"
47#include "dce/dce_clocks.h"
48#include "dce/dce_clock_source.h" 48#include "dce/dce_clock_source.h"
49#include "dce/dce_audio.h" 49#include "dce/dce_audio.h"
50#include "dce/dce_hwseq.h" 50#include "dce/dce_hwseq.h"
@@ -793,7 +793,7 @@ bool dce80_validate_bandwidth(
793{ 793{
794 /* TODO implement when needed but for now hardcode max value*/ 794 /* TODO implement when needed but for now hardcode max value*/
795 context->bw.dce.dispclk_khz = 681000; 795 context->bw.dce.dispclk_khz = 681000;
796 context->bw.dce.yclk_khz = 250000 * MEMORY_TYPE_MULTIPLIER; 796 context->bw.dce.yclk_khz = 250000 * MEMORY_TYPE_MULTIPLIER_CZ;
797 797
798 return true; 798 return true;
799} 799}
diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
index 193184affefb..4976230f78e4 100644
--- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
+++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
@@ -2257,46 +2257,6 @@ static void program_all_pipe_in_tree(
2257 } 2257 }
2258} 2258}
2259 2259
2260static void dcn10_pplib_apply_display_requirements(
2261 struct dc *dc,
2262 struct dc_state *context)
2263{
2264 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
2265
2266 pp_display_cfg->min_engine_clock_khz = dc->res_pool->dccg->clks.dcfclk_khz;
2267 pp_display_cfg->min_memory_clock_khz = dc->res_pool->dccg->clks.fclk_khz;
2268 pp_display_cfg->min_engine_clock_deep_sleep_khz = dc->res_pool->dccg->clks.dcfclk_deep_sleep_khz;
2269 pp_display_cfg->min_dcfc_deep_sleep_clock_khz = dc->res_pool->dccg->clks.dcfclk_deep_sleep_khz;
2270 pp_display_cfg->min_dcfclock_khz = dc->res_pool->dccg->clks.dcfclk_khz;
2271 pp_display_cfg->disp_clk_khz = dc->res_pool->dccg->clks.dispclk_khz;
2272 dce110_fill_display_configs(context, pp_display_cfg);
2273
2274 if (memcmp(&dc->prev_display_config, pp_display_cfg, sizeof(
2275 struct dm_pp_display_configuration)) != 0)
2276 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
2277
2278 dc->prev_display_config = *pp_display_cfg;
2279}
2280
2281static void optimize_shared_resources(struct dc *dc)
2282{
2283 if (dc->current_state->stream_count == 0) {
2284 /* S0i2 message */
2285 dcn10_pplib_apply_display_requirements(dc, dc->current_state);
2286 }
2287
2288 if (dc->debug.pplib_wm_report_mode == WM_REPORT_OVERRIDE)
2289 dcn_bw_notify_pplib_of_wm_ranges(dc);
2290}
2291
2292static void ready_shared_resources(struct dc *dc, struct dc_state *context)
2293{
2294 /* S0i2 message */
2295 if (dc->current_state->stream_count == 0 &&
2296 context->stream_count != 0)
2297 dcn10_pplib_apply_display_requirements(dc, context);
2298}
2299
2300static struct pipe_ctx *find_top_pipe_for_stream( 2260static struct pipe_ctx *find_top_pipe_for_stream(
2301 struct dc *dc, 2261 struct dc *dc,
2302 struct dc_state *context, 2262 struct dc_state *context,
@@ -2412,10 +2372,8 @@ static void dcn10_set_bandwidth(
2412 2372
2413 dc->res_pool->dccg->funcs->update_clocks( 2373 dc->res_pool->dccg->funcs->update_clocks(
2414 dc->res_pool->dccg, 2374 dc->res_pool->dccg,
2415 &context->bw.dcn.clk, 2375 context,
2416 safe_to_lower); 2376 safe_to_lower);
2417
2418 dcn10_pplib_apply_display_requirements(dc, context);
2419 } 2377 }
2420 2378
2421 hubbub1_program_watermarks(dc->res_pool->hubbub, 2379 hubbub1_program_watermarks(dc->res_pool->hubbub,
@@ -2423,6 +2381,9 @@ static void dcn10_set_bandwidth(
2423 dc->res_pool->ref_clock_inKhz / 1000, 2381 dc->res_pool->ref_clock_inKhz / 1000,
2424 true); 2382 true);
2425 2383
2384 if (dc->debug.pplib_wm_report_mode == WM_REPORT_OVERRIDE)
2385 dcn_bw_notify_pplib_of_wm_ranges(dc);
2386
2426 if (dc->debug.sanity_checks) 2387 if (dc->debug.sanity_checks)
2427 dcn10_verify_allow_pstate_change_high(dc); 2388 dcn10_verify_allow_pstate_change_high(dc);
2428} 2389}
@@ -2732,10 +2693,6 @@ static const struct hw_sequencer_funcs dcn10_funcs = {
2732 .log_hw_state = dcn10_log_hw_state, 2693 .log_hw_state = dcn10_log_hw_state,
2733 .get_hw_state = dcn10_get_hw_state, 2694 .get_hw_state = dcn10_get_hw_state,
2734 .wait_for_mpcc_disconnect = dcn10_wait_for_mpcc_disconnect, 2695 .wait_for_mpcc_disconnect = dcn10_wait_for_mpcc_disconnect,
2735 .ready_shared_resources = ready_shared_resources,
2736 .optimize_shared_resources = optimize_shared_resources,
2737 .pplib_apply_display_requirements =
2738 dcn10_pplib_apply_display_requirements,
2739 .edp_backlight_control = hwss_edp_backlight_control, 2696 .edp_backlight_control = hwss_edp_backlight_control,
2740 .edp_power_control = hwss_edp_power_control, 2697 .edp_power_control = hwss_edp_power_control,
2741 .edp_wait_for_hpd_ready = hwss_edp_wait_for_hpd_ready, 2698 .edp_wait_for_hpd_ready = hwss_edp_wait_for_hpd_ready,
diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c
index a71453a15ae3..6227db6b8abc 100644
--- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c
+++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c
@@ -40,7 +40,7 @@
40#include "dcn10/dcn10_opp.h" 40#include "dcn10/dcn10_opp.h"
41#include "dcn10/dcn10_link_encoder.h" 41#include "dcn10/dcn10_link_encoder.h"
42#include "dcn10/dcn10_stream_encoder.h" 42#include "dcn10/dcn10_stream_encoder.h"
43#include "dce/dce_clocks.h" 43#include "../dce/dce_clocks.h"
44#include "dce/dce_clock_source.h" 44#include "dce/dce_clock_source.h"
45#include "dce/dce_audio.h" 45#include "dce/dce_audio.h"
46#include "dce/dce_hwseq.h" 46#include "dce/dce_hwseq.h"
diff --git a/drivers/gpu/drm/amd/display/dc/inc/core_types.h b/drivers/gpu/drm/amd/display/dc/inc/core_types.h
index c1976c175b57..40b8bc92e698 100644
--- a/drivers/gpu/drm/amd/display/dc/inc/core_types.h
+++ b/drivers/gpu/drm/amd/display/dc/inc/core_types.h
@@ -287,7 +287,7 @@ struct dc_state {
287 struct dcn_bw_internal_vars dcn_bw_vars; 287 struct dcn_bw_internal_vars dcn_bw_vars;
288#endif 288#endif
289 289
290 struct dccg *dis_clk; 290 struct dccg *dccg;
291 291
292 struct kref refcount; 292 struct kref refcount;
293}; 293};
diff --git a/drivers/gpu/drm/amd/display/dc/inc/hw/display_clock.h b/drivers/gpu/drm/amd/display/dc/inc/hw/display_clock.h
index 689faa16c0ae..14eb0e420e76 100644
--- a/drivers/gpu/drm/amd/display/dc/inc/hw/display_clock.h
+++ b/drivers/gpu/drm/amd/display/dc/inc/hw/display_clock.h
@@ -38,26 +38,19 @@ struct state_dependent_clocks {
38 38
39struct dccg { 39struct dccg {
40 struct dc_context *ctx; 40 struct dc_context *ctx;
41 const struct display_clock_funcs *funcs; 41 const struct dccg_funcs *funcs;
42 42
43 enum dm_pp_clocks_state max_clks_state; 43 enum dm_pp_clocks_state max_clks_state;
44 enum dm_pp_clocks_state cur_min_clks_state; 44 enum dm_pp_clocks_state cur_min_clks_state;
45 struct dc_clocks clks; 45 struct dc_clocks clks;
46}; 46};
47 47
48struct display_clock_funcs { 48struct dccg_funcs {
49 void (*update_clocks)(struct dccg *dccg, 49 void (*update_clocks)(struct dccg *dccg,
50 struct dc_clocks *new_clocks, 50 struct dc_state *context,
51 bool safe_to_lower); 51 bool safe_to_lower);
52 int (*set_dispclk)(struct dccg *dccg,
53 int requested_clock_khz);
54 52
55 int (*get_dp_ref_clk_frequency)(struct dccg *dccg); 53 int (*get_dp_ref_clk_frequency)(struct dccg *dccg);
56
57 bool (*update_dfs_bypass)(struct dccg *dccg,
58 struct dc *dc,
59 struct dc_state *context,
60 int requested_clock_khz);
61}; 54};
62 55
63#endif /* __DISPLAY_CLOCK_H__ */ 56#endif /* __DISPLAY_CLOCK_H__ */
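A minimal compileable sketch of the trimmed function table from the hunk above, with opaque forward declarations standing in for the real types: update_clocks() is now the single programming entry point and takes a full dc_state (set_dispclk and update_dfs_bypass are dropped from the table, and the callers shown earlier no longer notify pplib themselves), alongside the unchanged DP ref clock query.

#include <stdbool.h>

struct dccg;            /* opaque stand-ins for the driver types */
struct dc_state;

struct dccg_funcs_sketch {
        void (*update_clocks)(struct dccg *dccg,
                              struct dc_state *context,
                              bool safe_to_lower);
        int (*get_dp_ref_clk_frequency)(struct dccg *dccg);
};

/* Typical call-site shape after this patch: hand over the whole
 * validated state and let the dccg block decide what to program. */
static void commit_clocks_sketch(const struct dccg_funcs_sketch *funcs,
                                 struct dccg *dccg,
                                 struct dc_state *context,
                                 bool safe_to_lower)
{
        funcs->update_clocks(dccg, context, safe_to_lower);
}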
diff --git a/drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h b/drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h
index 26f29d5da3d8..c673d3ef67cc 100644
--- a/drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h
+++ b/drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h
@@ -210,11 +210,6 @@ struct hw_sequencer_funcs {
210 struct resource_pool *res_pool, 210 struct resource_pool *res_pool,
211 struct pipe_ctx *pipe_ctx); 211 struct pipe_ctx *pipe_ctx);
212 212
213 void (*ready_shared_resources)(struct dc *dc, struct dc_state *context);
214 void (*optimize_shared_resources)(struct dc *dc);
215 void (*pplib_apply_display_requirements)(
216 struct dc *dc,
217 struct dc_state *context);
218 void (*edp_power_control)( 213 void (*edp_power_control)(
219 struct dc_link *link, 214 struct dc_link *link,
220 bool enable); 215 bool enable);
diff --git a/drivers/gpu/drm/amd/display/dc/inc/resource.h b/drivers/gpu/drm/amd/display/dc/inc/resource.h
index 33b99e3ab10d..0086a2f1d21a 100644
--- a/drivers/gpu/drm/amd/display/dc/inc/resource.h
+++ b/drivers/gpu/drm/amd/display/dc/inc/resource.h
@@ -30,9 +30,6 @@
30#include "dal_asic_id.h" 30#include "dal_asic_id.h"
31#include "dm_pp_smu.h" 31#include "dm_pp_smu.h"
32 32
33/* TODO unhardcode, 4 for CZ*/
34#define MEMORY_TYPE_MULTIPLIER 4
35
36enum dce_version resource_parse_asic_id( 33enum dce_version resource_parse_asic_id(
37 struct hw_asic_id asic_id); 34 struct hw_asic_id asic_id);
38 35
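With the generic MEMORY_TYPE_MULTIPLIER gone from resource.h, the DCE paths shown above use the CZ-specific MEMORY_TYPE_MULTIPLIER_CZ from dce_clocks.h when converting a memory clock into YCLK; a tiny sketch of that conversion, using the hard-coded 250000 kHz value from the dce80/dce100 validate_bandwidth stubs:

#include <stdio.h>

#define MEMORY_TYPE_MULTIPLIER_CZ 4     /* as defined in dce_clocks.h above */

int main(void)
{
        /* YCLK = UMACLK * memory type multiplier (see the bw_calcs hunks). */
        unsigned int umaclk_khz = 250000;
        unsigned int yclk_khz = umaclk_khz * MEMORY_TYPE_MULTIPLIER_CZ;

        printf("yclk = %u kHz\n", yclk_khz);    /* 1000000 kHz */
        return 0;
}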