author	Archit Taneja <architt@codeaurora.org>	2018-01-17 01:05:26 -0500
committer	Rob Clark <robdclark@gmail.com>	2018-02-20 10:41:20 -0500
commit	28e4309ab9c2bade2a93bd3b4c583be5ec440b84 (patch)
tree	cc7eaf9d5c57928b4df308523af6fe7f0719752f
parent	973e02db35c2c4036693e32ed6f250eefd8c322c (diff)
drm/msm/dsi: Populate PLL 10nm clock ops
Populate the PLL clock ops from downstream. This contains the VCO PLL ops and the registration of standard clk_divider and clk_mux clocks.

Unlike the 14nm PLL, the postdividers/mux of the slave PLL don't need to be set to the same values as the postdividers/mux of the master PLL. Hence, we don't need special postdivider clock ops like we did for the 14nm PLL driver.

Like the previous PLL drivers, the implementation is slightly different from downstream: we don't use shadow clocks, but instead have the ability to reparent the RCGs to a different source.

Signed-off-by: Archit Taneja <architt@codeaurora.org>
Signed-off-by: Rob Clark <robdclark@gmail.com>
-rw-r--r--	drivers/gpu/drm/msm/dsi/pll/dsi_pll_10nm.c | 662
1 file changed, 654 insertions(+), 8 deletions(-)
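For reference, the decimal/fractional feedback-divider split performed by dsi_pll_calc_dec_frac() in the patch below can be reproduced in isolation. The self-contained sketch that follows is not part of the patch; the 19.2 MHz reference clock and the 1.5 GHz target VCO rate are assumed example values, and the prescaler is assumed enabled (divide-by-two), matching the defaults set in dsi_pll_setup_config().

/*
 * Standalone sketch (not part of the patch): mirrors the decimal/fractional
 * divider math in dsi_pll_calc_dec_frac(). The reference clock and target
 * VCO rate below are assumed example values.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t fref = 19200000ULL;		/* assumed 19.2 MHz reference */
	uint64_t pll_freq = 1500000000ULL;	/* assumed 1.5 GHz VCO target */
	unsigned int frac_bits = 18;		/* matches config->frac_bits */
	uint64_t divider = fref * 2;		/* prescaler enabled (div-by-2) */

	uint64_t multiplier = 1ULL << frac_bits;
	uint64_t dec_multiple = (pll_freq * multiplier) / divider;
	uint32_t frac = (uint32_t)(dec_multiple % multiplier);
	uint64_t dec = dec_multiple / multiplier;

	/* dec -> DECIMAL_DIV_START_1, frac -> FRAC_DIV_START_{LOW,MID,HIGH}_1 */
	printf("dec = %llu, frac = 0x%05x\n", (unsigned long long)dec, frac);
	return 0;
}

With these assumed inputs the program prints dec = 39 and frac = 0x04000, i.e. the values dsi_pll_commit() would program into the DECIMAL_DIV_START_1 and FRAC_DIV_START_{LOW,MID,HIGH}_1 registers.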
diff --git a/drivers/gpu/drm/msm/dsi/pll/dsi_pll_10nm.c b/drivers/gpu/drm/msm/dsi/pll/dsi_pll_10nm.c
index 34c24442d34b..c4c37a7df637 100644
--- a/drivers/gpu/drm/msm/dsi/pll/dsi_pll_10nm.c
+++ b/drivers/gpu/drm/msm/dsi/pll/dsi_pll_10nm.c
@@ -10,6 +10,78 @@
10#include "dsi_pll.h" 10#include "dsi_pll.h"
11#include "dsi.xml.h" 11#include "dsi.xml.h"
12 12
+/*
+ * DSI PLL 10nm - clock diagram (eg: DSI0):
+ *
+ *           dsi0_pll_out_div_clk  dsi0_pll_bit_clk
+ *                              |                |
+ *                              |                |
+ *                 +---------+  |  +----------+  |  +----+
+ *  dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0pllbyte
+ *                 +---------+  |  +----------+  |  +----+
+ *                              |                |
+ *                              |                |         dsi0_pll_by_2_bit_clk
+ *                              |                |          |
+ *                              |                |  +----+  |  |\  dsi0_pclk_mux
+ *                              |                |--| /2 |--o--| \   |
+ *                              |                |  +----+     |  \  |  +---------+
+ *                              |                --------------|  |--o--| div_7_4 |-- dsi0pll
+ *                              |------------------------------|  /     +---------+
+ *                              |  +-----+                     | /
+ *                              -----------| /4? |--o----------|/
+ *                                 +-----+  |           |
+ *                                          |           |dsiclk_sel
+ *                                          |
+ *                                          dsi0_pll_post_out_div_clk
+ */
+
+#define DSI_BYTE_PLL_CLK	0
+#define DSI_PIXEL_PLL_CLK	1
+#define NUM_PROVIDED_CLKS	2
+
+struct dsi_pll_regs {
+	u32 pll_prop_gain_rate;
+	u32 pll_lockdet_rate;
+	u32 decimal_div_start;
+	u32 frac_div_start_low;
+	u32 frac_div_start_mid;
+	u32 frac_div_start_high;
+	u32 pll_clock_inverters;
+	u32 ssc_stepsize_low;
+	u32 ssc_stepsize_high;
+	u32 ssc_div_per_low;
+	u32 ssc_div_per_high;
+	u32 ssc_adjper_low;
+	u32 ssc_adjper_high;
+	u32 ssc_control;
+};
+
+struct dsi_pll_config {
+	u32 ref_freq;
+	bool div_override;
+	u32 output_div;
+	bool ignore_frac;
+	bool disable_prescaler;
+	bool enable_ssc;
+	bool ssc_center;
+	u32 dec_bits;
+	u32 frac_bits;
+	u32 lock_timer;
+	u32 ssc_freq;
+	u32 ssc_offset;
+	u32 ssc_adj_per;
+	u32 thresh_cycles;
+	u32 refclk_cycles;
+};
+
+struct pll_10nm_cached_state {
+	unsigned long vco_rate;
+	u8 bit_clk_div;
+	u8 pix_clk_div;
+	u8 pll_out_div;
+	u8 pll_mux;
+};
+
 struct dsi_pll_10nm {
 	struct msm_dsi_pll base;
 
@@ -19,7 +91,24 @@ struct dsi_pll_10nm {
 	void __iomem *phy_cmn_mmio;
 	void __iomem *mmio;
 
+	u64 vco_ref_clk_rate;
+	u64 vco_current_rate;
+
+	/* protects REG_DSI_10nm_PHY_CMN_CLK_CFG0 register */
+	spinlock_t postdiv_lock;
+
 	int vco_delay;
+	struct dsi_pll_config pll_configuration;
+	struct dsi_pll_regs reg_setup;
+
+	/* private clocks: */
+	struct clk_hw *hws[NUM_DSI_CLOCKS_MAX];
+	u32 num_hws;
+
+	/* clock-provider: */
+	struct clk_hw_onecell_data *hw_data;
+
+	struct pll_10nm_cached_state cached_state;
 
 	enum msm_dsi_phy_usecase uc;
 	struct dsi_pll_10nm *slave;
@@ -33,6 +122,190 @@ struct dsi_pll_10nm {
  */
 static struct dsi_pll_10nm *pll_10nm_list[DSI_MAX];
 
+static void dsi_pll_setup_config(struct dsi_pll_10nm *pll)
+{
+	struct dsi_pll_config *config = &pll->pll_configuration;
+
+	config->ref_freq = pll->vco_ref_clk_rate;
+	config->output_div = 1;
+	config->dec_bits = 8;
+	config->frac_bits = 18;
+	config->lock_timer = 64;
+	config->ssc_freq = 31500;
+	config->ssc_offset = 5000;
+	config->ssc_adj_per = 2;
+	config->thresh_cycles = 32;
+	config->refclk_cycles = 256;
+
+	config->div_override = false;
+	config->ignore_frac = false;
+	config->disable_prescaler = false;
+
+	config->enable_ssc = false;
+	config->ssc_center = 0;
+}
+
+static void dsi_pll_calc_dec_frac(struct dsi_pll_10nm *pll)
+{
+	struct dsi_pll_config *config = &pll->pll_configuration;
+	struct dsi_pll_regs *regs = &pll->reg_setup;
+	u64 fref = pll->vco_ref_clk_rate;
+	u64 pll_freq;
+	u64 divider;
+	u64 dec, dec_multiple;
+	u32 frac;
+	u64 multiplier;
+
+	pll_freq = pll->vco_current_rate;
+
+	if (config->disable_prescaler)
+		divider = fref;
+	else
+		divider = fref * 2;
+
+	multiplier = 1 << config->frac_bits;
+	dec_multiple = div_u64(pll_freq * multiplier, divider);
+	div_u64_rem(dec_multiple, multiplier, &frac);
+
+	dec = div_u64(dec_multiple, multiplier);
+
+	if (pll_freq <= 1900000000UL)
+		regs->pll_prop_gain_rate = 8;
+	else if (pll_freq <= 3000000000UL)
+		regs->pll_prop_gain_rate = 10;
+	else
+		regs->pll_prop_gain_rate = 12;
+	if (pll_freq < 1100000000UL)
+		regs->pll_clock_inverters = 8;
+	else
+		regs->pll_clock_inverters = 0;
+
+	regs->pll_lockdet_rate = config->lock_timer;
+	regs->decimal_div_start = dec;
+	regs->frac_div_start_low = (frac & 0xff);
+	regs->frac_div_start_mid = (frac & 0xff00) >> 8;
+	regs->frac_div_start_high = (frac & 0x30000) >> 16;
+}
+
+#define SSC_CENTER		BIT(0)
+#define SSC_EN			BIT(1)
+
+static void dsi_pll_calc_ssc(struct dsi_pll_10nm *pll)
+{
+	struct dsi_pll_config *config = &pll->pll_configuration;
+	struct dsi_pll_regs *regs = &pll->reg_setup;
+	u32 ssc_per;
+	u32 ssc_mod;
+	u64 ssc_step_size;
+	u64 frac;
+
+	if (!config->enable_ssc) {
+		DBG("SSC not enabled\n");
+		return;
+	}
+
+	ssc_per = DIV_ROUND_CLOSEST(config->ref_freq, config->ssc_freq) / 2 - 1;
+	ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
+	ssc_per -= ssc_mod;
+
+	frac = regs->frac_div_start_low |
+			(regs->frac_div_start_mid << 8) |
+			(regs->frac_div_start_high << 16);
+	ssc_step_size = regs->decimal_div_start;
+	ssc_step_size *= (1 << config->frac_bits);
+	ssc_step_size += frac;
+	ssc_step_size *= config->ssc_offset;
+	ssc_step_size *= (config->ssc_adj_per + 1);
+	ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
+	ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);
+
+	regs->ssc_div_per_low = ssc_per & 0xFF;
+	regs->ssc_div_per_high = (ssc_per & 0xFF00) >> 8;
+	regs->ssc_stepsize_low = (u32)(ssc_step_size & 0xFF);
+	regs->ssc_stepsize_high = (u32)((ssc_step_size & 0xFF00) >> 8);
+	regs->ssc_adjper_low = config->ssc_adj_per & 0xFF;
+	regs->ssc_adjper_high = (config->ssc_adj_per & 0xFF00) >> 8;
+
+	regs->ssc_control = config->ssc_center ? SSC_CENTER : 0;
+
+	pr_debug("SCC: Dec:%d, frac:%llu, frac_bits:%d\n",
+			regs->decimal_div_start, frac, config->frac_bits);
+	pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
+			ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
+}
+
+static void dsi_pll_ssc_commit(struct dsi_pll_10nm *pll)
+{
+	void __iomem *base = pll->mmio;
+	struct dsi_pll_regs *regs = &pll->reg_setup;
+
+	if (pll->pll_configuration.enable_ssc) {
+		pr_debug("SSC is enabled\n");
+
+		pll_write(base + REG_DSI_10nm_PHY_PLL_SSC_STEPSIZE_LOW_1,
+			  regs->ssc_stepsize_low);
+		pll_write(base + REG_DSI_10nm_PHY_PLL_SSC_STEPSIZE_HIGH_1,
+			  regs->ssc_stepsize_high);
+		pll_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_PER_LOW_1,
+			  regs->ssc_div_per_low);
+		pll_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_PER_HIGH_1,
+			  regs->ssc_div_per_high);
+		pll_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_ADJPER_LOW_1,
+			  regs->ssc_adjper_low);
+		pll_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_ADJPER_HIGH_1,
+			  regs->ssc_adjper_high);
+		pll_write(base + REG_DSI_10nm_PHY_PLL_SSC_CONTROL,
+			  SSC_EN | regs->ssc_control);
+	}
+}
+
+static void dsi_pll_config_hzindep_reg(struct dsi_pll_10nm *pll)
+{
+	void __iomem *base = pll->mmio;
+
+	pll_write(base + REG_DSI_10nm_PHY_PLL_ANALOG_CONTROLS_ONE, 0x80);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_ANALOG_CONTROLS_TWO, 0x03);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_ANALOG_CONTROLS_THREE, 0x00);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_DSM_DIVIDER, 0x00);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_FEEDBACK_DIVIDER, 0x4e);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_CALIBRATION_SETTINGS, 0x40);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE,
+		  0xba);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE, 0x0c);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_OUTDIV, 0x00);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_CORE_OVERRIDE, 0x00);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO, 0x08);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_PROP_GAIN_RATE_1, 0x08);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_BAND_SET_RATE_1, 0xc0);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1, 0xfa);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1,
+		  0x4c);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_LOCK_OVERRIDE, 0x80);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PFILT, 0x29);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_IFILT, 0x3f);
+}
+
+static void dsi_pll_commit(struct dsi_pll_10nm *pll)
+{
+	void __iomem *base = pll->mmio;
+	struct dsi_pll_regs *reg = &pll->reg_setup;
+
+	pll_write(base + REG_DSI_10nm_PHY_PLL_CORE_INPUT_OVERRIDE, 0x12);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_DECIMAL_DIV_START_1,
+		  reg->decimal_div_start);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_LOW_1,
+		  reg->frac_div_start_low);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_MID_1,
+		  reg->frac_div_start_mid);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_HIGH_1,
+		  reg->frac_div_start_high);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_LOCKDET_RATE_1, 0x40);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_PLL_LOCK_DELAY, 0x06);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_CMODE, 0x10);
+	pll_write(base + REG_DSI_10nm_PHY_PLL_CLOCK_INVERTERS,
+		  reg->pll_clock_inverters);
+}
+
 static int dsi_pll_10nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
 				     unsigned long parent_rate)
 {
@@ -42,18 +315,192 @@ static int dsi_pll_10nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
42 DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_10nm->id, rate, 315 DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_10nm->id, rate,
43 parent_rate); 316 parent_rate);
44 317
+	pll_10nm->vco_current_rate = rate;
+	pll_10nm->vco_ref_clk_rate = parent_rate;
+
+	dsi_pll_setup_config(pll_10nm);
+
+	dsi_pll_calc_dec_frac(pll_10nm);
+
+	dsi_pll_calc_ssc(pll_10nm);
+
+	dsi_pll_commit(pll_10nm);
+
+	dsi_pll_config_hzindep_reg(pll_10nm);
+
+	dsi_pll_ssc_commit(pll_10nm);
+
+	/* flush, ensure all register writes are done */
+	wmb();
+
 	return 0;
 }
 
+static int dsi_pll_10nm_lock_status(struct dsi_pll_10nm *pll)
+{
+	int rc;
+	u32 status = 0;
+	u32 const delay_us = 100;
+	u32 const timeout_us = 5000;
+
+	rc = readl_poll_timeout_atomic(pll->mmio +
+				       REG_DSI_10nm_PHY_PLL_COMMON_STATUS_ONE,
+				       status,
+				       ((status & BIT(0)) > 0),
+				       delay_us,
+				       timeout_us);
+	if (rc)
+		pr_err("DSI PLL(%d) lock failed, status=0x%08x\n",
+		       pll->id, status);
+
+	return rc;
+}
+
+static void dsi_pll_disable_pll_bias(struct dsi_pll_10nm *pll)
+{
+	u32 data = pll_read(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CTRL_0);
+
+	pll_write(pll->mmio + REG_DSI_10nm_PHY_PLL_SYSTEM_MUXES, 0);
+	pll_write(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CTRL_0,
+		  data & ~BIT(5));
+	ndelay(250);
+}
+
+static void dsi_pll_enable_pll_bias(struct dsi_pll_10nm *pll)
+{
+	u32 data = pll_read(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CTRL_0);
+
+	pll_write(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CTRL_0,
+		  data | BIT(5));
+	pll_write(pll->mmio + REG_DSI_10nm_PHY_PLL_SYSTEM_MUXES, 0xc0);
+	ndelay(250);
+}
+
+static void dsi_pll_disable_global_clk(struct dsi_pll_10nm *pll)
+{
+	u32 data;
+
+	data = pll_read(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
+	pll_write(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CLK_CFG1,
+		  data & ~BIT(5));
+}
+
+static void dsi_pll_enable_global_clk(struct dsi_pll_10nm *pll)
+{
+	u32 data;
+
+	data = pll_read(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
+	pll_write(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_CLK_CFG1,
+		  data | BIT(5));
+}
+
+static int dsi_pll_10nm_vco_prepare(struct clk_hw *hw)
+{
+	struct msm_dsi_pll *pll = hw_clk_to_pll(hw);
+	struct dsi_pll_10nm *pll_10nm = to_pll_10nm(pll);
+	int rc;
+
+	dsi_pll_enable_pll_bias(pll_10nm);
+	if (pll_10nm->slave)
+		dsi_pll_enable_pll_bias(pll_10nm->slave);
+
+	/* Start PLL */
+	pll_write(pll_10nm->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_PLL_CNTRL,
+		  0x01);
+
+	/*
+	 * ensure all PLL configurations are written prior to checking
+	 * for PLL lock.
+	 */
+	wmb();
+
+	/* Check for PLL lock */
+	rc = dsi_pll_10nm_lock_status(pll_10nm);
+	if (rc) {
+		pr_err("PLL(%d) lock failed\n", pll_10nm->id);
+		goto error;
+	}
+
+	pll->pll_on = true;
+
+	dsi_pll_enable_global_clk(pll_10nm);
+	if (pll_10nm->slave)
+		dsi_pll_enable_global_clk(pll_10nm->slave);
+
+	pll_write(pll_10nm->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_RBUF_CTRL,
+		  0x01);
+	if (pll_10nm->slave)
+		pll_write(pll_10nm->slave->phy_cmn_mmio +
+			  REG_DSI_10nm_PHY_CMN_RBUF_CTRL, 0x01);
+
+error:
+	return rc;
+}
+
+static void dsi_pll_disable_sub(struct dsi_pll_10nm *pll)
+{
+	pll_write(pll->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_RBUF_CTRL, 0);
+	dsi_pll_disable_pll_bias(pll);
+}
+
+static void dsi_pll_10nm_vco_unprepare(struct clk_hw *hw)
+{
+	struct msm_dsi_pll *pll = hw_clk_to_pll(hw);
+	struct dsi_pll_10nm *pll_10nm = to_pll_10nm(pll);
+
+	/*
+	 * To avoid any stray glitches while abruptly powering down the PLL
+	 * make sure to gate the clock using the clock enable bit before
+	 * powering down the PLL
+	 */
+	dsi_pll_disable_global_clk(pll_10nm);
+	pll_write(pll_10nm->phy_cmn_mmio + REG_DSI_10nm_PHY_CMN_PLL_CNTRL, 0);
+	dsi_pll_disable_sub(pll_10nm);
+	if (pll_10nm->slave) {
+		dsi_pll_disable_global_clk(pll_10nm->slave);
+		dsi_pll_disable_sub(pll_10nm->slave);
+	}
+	/* flush, ensure all register writes are done */
+	wmb();
+	pll->pll_on = false;
+}
+
 static unsigned long dsi_pll_10nm_vco_recalc_rate(struct clk_hw *hw,
 						  unsigned long parent_rate)
 {
 	struct msm_dsi_pll *pll = hw_clk_to_pll(hw);
 	struct dsi_pll_10nm *pll_10nm = to_pll_10nm(pll);
+	void __iomem *base = pll_10nm->mmio;
+	u64 ref_clk = pll_10nm->vco_ref_clk_rate;
 	u64 vco_rate = 0x0;
-
-	DBG("DSI PLL%d returning vco rate = %lu", pll_10nm->id,
-	    (unsigned long)vco_rate);
+	u64 multiplier;
+	u32 frac;
+	u32 dec;
+	u64 pll_freq, tmp64;
+
+	dec = pll_read(base + REG_DSI_10nm_PHY_PLL_DECIMAL_DIV_START_1);
+	dec &= 0xff;
+
+	frac = pll_read(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_LOW_1);
+	frac |= ((pll_read(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_MID_1) &
+		  0xff) << 8);
+	frac |= ((pll_read(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
+		  0x3) << 16);
+
+	/*
+	 * TODO:
+	 *	1. Assumes prescaler is disabled
+	 *	2. Multiplier is 2^18. it should be 2^(num_of_frac_bits)
+	 */
+	multiplier = 1 << 18;
+	pll_freq = dec * (ref_clk * 2);
+	tmp64 = (ref_clk * 2 * frac);
+	pll_freq += div_u64(tmp64, multiplier);
+
+	vco_rate = pll_freq;
+
+	DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
+	    pll_10nm->id, (unsigned long)vco_rate, dec, frac);
 
 	return (unsigned long)vco_rate;
 }
@@ -62,8 +509,8 @@ static const struct clk_ops clk_ops_dsi_pll_10nm_vco = {
 	.round_rate = msm_dsi_pll_helper_clk_round_rate,
 	.set_rate = dsi_pll_10nm_vco_set_rate,
 	.recalc_rate = dsi_pll_10nm_vco_recalc_rate,
-	.prepare = msm_dsi_pll_helper_clk_prepare,
-	.unprepare = msm_dsi_pll_helper_clk_unprepare,
+	.prepare = dsi_pll_10nm_vco_prepare,
+	.unprepare = dsi_pll_10nm_vco_unprepare,
 };
 
 /*
@@ -73,13 +520,45 @@ static const struct clk_ops clk_ops_dsi_pll_10nm_vco = {
 static void dsi_pll_10nm_save_state(struct msm_dsi_pll *pll)
 {
 	struct dsi_pll_10nm *pll_10nm = to_pll_10nm(pll);
+	struct pll_10nm_cached_state *cached = &pll_10nm->cached_state;
+	void __iomem *phy_base = pll_10nm->phy_cmn_mmio;
+	u32 cmn_clk_cfg0, cmn_clk_cfg1;
 
-	DBG("DSI PLL%d", pll_10nm->id);
+	cached->pll_out_div = pll_read(pll_10nm->mmio +
+				       REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE);
+	cached->pll_out_div &= 0x3;
+
+	cmn_clk_cfg0 = pll_read(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG0);
+	cached->bit_clk_div = cmn_clk_cfg0 & 0xf;
+	cached->pix_clk_div = (cmn_clk_cfg0 & 0xf0) >> 4;
+
+	cmn_clk_cfg1 = pll_read(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
+	cached->pll_mux = cmn_clk_cfg1 & 0x3;
+
+	DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
+	    pll_10nm->id, cached->pll_out_div, cached->bit_clk_div,
+	    cached->pix_clk_div, cached->pll_mux);
 }
 
 static int dsi_pll_10nm_restore_state(struct msm_dsi_pll *pll)
 {
 	struct dsi_pll_10nm *pll_10nm = to_pll_10nm(pll);
+	struct pll_10nm_cached_state *cached = &pll_10nm->cached_state;
+	void __iomem *phy_base = pll_10nm->phy_cmn_mmio;
+	u32 val;
+
+	val = pll_read(pll_10nm->mmio + REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE);
+	val &= ~0x3;
+	val |= cached->pll_out_div;
+	pll_write(pll_10nm->mmio + REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE, val);
+
+	pll_write(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG0,
+		  cached->bit_clk_div | (cached->pix_clk_div << 4));
+
+	val = pll_read(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
+	val &= ~0x3;
+	val |= cached->pll_mux;
+	pll_write(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG1, val);
 
 	DBG("DSI PLL%d", pll_10nm->id);
 
@@ -90,9 +569,29 @@ static int dsi_pll_10nm_set_usecase(struct msm_dsi_pll *pll,
 				    enum msm_dsi_phy_usecase uc)
 {
 	struct dsi_pll_10nm *pll_10nm = to_pll_10nm(pll);
+	void __iomem *base = pll_10nm->phy_cmn_mmio;
+	u32 data = 0x0; /* internal PLL */
 
 	DBG("DSI PLL%d", pll_10nm->id);
 
+	switch (uc) {
+	case MSM_DSI_PHY_STANDALONE:
+		break;
+	case MSM_DSI_PHY_MASTER:
+		pll_10nm->slave = pll_10nm_list[(pll_10nm->id + 1) % DSI_MAX];
+		break;
+	case MSM_DSI_PHY_SLAVE:
+		data = 0x1; /* external PLL */
+		break;
+	default:
+		return -EINVAL;
+	}
+
+	/* set PLL src */
+	pll_write(base + REG_DSI_10nm_PHY_CMN_CLK_CFG1, (data << 2));
+
+	pll_10nm->uc = uc;
+
 	return 0;
 }
 
@@ -101,13 +600,14 @@ static int dsi_pll_10nm_get_provider(struct msm_dsi_pll *pll,
 					 struct clk **pixel_clk_provider)
 {
 	struct dsi_pll_10nm *pll_10nm = to_pll_10nm(pll);
+	struct clk_hw_onecell_data *hw_data = pll_10nm->hw_data;
 
 	DBG("DSI PLL%d", pll_10nm->id);
 
 	if (byte_clk_provider)
-		*byte_clk_provider = NULL;
+		*byte_clk_provider = hw_data->hws[DSI_BYTE_PLL_CLK]->clk;
 	if (pixel_clk_provider)
-		*pixel_clk_provider = NULL;
+		*pixel_clk_provider = hw_data->hws[DSI_PIXEL_PLL_CLK]->clk;
 
 	return 0;
 }
@@ -119,8 +619,151 @@ static void dsi_pll_10nm_destroy(struct msm_dsi_pll *pll)
119 DBG("DSI PLL%d", pll_10nm->id); 619 DBG("DSI PLL%d", pll_10nm->id);
120} 620}
121 621
+/*
+ * The post dividers and mux clocks are created using the standard divider and
+ * mux API. Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
+ * state to follow the master PLL's divider/mux state. Therefore, we don't
+ * require special clock ops that also configure the slave PLL registers
+ */
 static int pll_10nm_register(struct dsi_pll_10nm *pll_10nm)
 {
+	char clk_name[32], parent[32], vco_name[32];
+	char parent2[32], parent3[32], parent4[32];
+	struct clk_init_data vco_init = {
+		.parent_names = (const char *[]){ "xo" },
+		.num_parents = 1,
+		.name = vco_name,
+		.flags = CLK_IGNORE_UNUSED,
+		.ops = &clk_ops_dsi_pll_10nm_vco,
+	};
+	struct device *dev = &pll_10nm->pdev->dev;
+	struct clk_hw **hws = pll_10nm->hws;
+	struct clk_hw_onecell_data *hw_data;
+	struct clk_hw *hw;
+	int num = 0;
+	int ret;
+
+	DBG("DSI%d", pll_10nm->id);
+
+	hw_data = devm_kzalloc(dev, sizeof(*hw_data) +
+			       NUM_PROVIDED_CLKS * sizeof(struct clk_hw *),
+			       GFP_KERNEL);
+	if (!hw_data)
+		return -ENOMEM;
+
+	snprintf(vco_name, 32, "dsi%dvco_clk", pll_10nm->id);
+	pll_10nm->base.clk_hw.init = &vco_init;
+
+	ret = clk_hw_register(dev, &pll_10nm->base.clk_hw);
+	if (ret)
+		return ret;
+
+	hws[num++] = &pll_10nm->base.clk_hw;
+
+	snprintf(clk_name, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
+	snprintf(parent, 32, "dsi%dvco_clk", pll_10nm->id);
+
+	hw = clk_hw_register_divider(dev, clk_name,
+				     parent, CLK_SET_RATE_PARENT,
+				     pll_10nm->mmio +
+				     REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE,
+				     0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
+	if (IS_ERR(hw))
+		return PTR_ERR(hw);
+
+	hws[num++] = hw;
+
+	snprintf(clk_name, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
+	snprintf(parent, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
+
+	/* BIT CLK: DIV_CTRL_3_0 */
+	hw = clk_hw_register_divider(dev, clk_name, parent,
+				     CLK_SET_RATE_PARENT,
+				     pll_10nm->phy_cmn_mmio +
+				     REG_DSI_10nm_PHY_CMN_CLK_CFG0,
+				     0, 4, CLK_DIVIDER_ONE_BASED,
+				     &pll_10nm->postdiv_lock);
+	if (IS_ERR(hw))
+		return PTR_ERR(hw);
+
+	hws[num++] = hw;
+
+	snprintf(clk_name, 32, "dsi%dpllbyte", pll_10nm->id);
+	snprintf(parent, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
+
+	/* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
+	hw = clk_hw_register_fixed_factor(dev, clk_name, parent,
+					  CLK_SET_RATE_PARENT, 1, 8);
+	if (IS_ERR(hw))
+		return PTR_ERR(hw);
+
+	hws[num++] = hw;
+	hw_data->hws[DSI_BYTE_PLL_CLK] = hw;
+
+	snprintf(clk_name, 32, "dsi%d_pll_by_2_bit_clk", pll_10nm->id);
+	snprintf(parent, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
+
+	hw = clk_hw_register_fixed_factor(dev, clk_name, parent,
+					  0, 1, 2);
+	if (IS_ERR(hw))
+		return PTR_ERR(hw);
+
+	hws[num++] = hw;
+
+	snprintf(clk_name, 32, "dsi%d_pll_post_out_div_clk", pll_10nm->id);
+	snprintf(parent, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
+
+	hw = clk_hw_register_fixed_factor(dev, clk_name, parent,
+					  0, 1, 4);
+	if (IS_ERR(hw))
+		return PTR_ERR(hw);
+
+	hws[num++] = hw;
+
+	snprintf(clk_name, 32, "dsi%d_pclk_mux", pll_10nm->id);
+	snprintf(parent, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
+	snprintf(parent2, 32, "dsi%d_pll_by_2_bit_clk", pll_10nm->id);
+	snprintf(parent3, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
+	snprintf(parent4, 32, "dsi%d_pll_post_out_div_clk", pll_10nm->id);
+
+	hw = clk_hw_register_mux(dev, clk_name,
+				 (const char *[]){
+				 parent, parent2, parent3, parent4
+				 }, 4, 0, pll_10nm->phy_cmn_mmio +
+				 REG_DSI_10nm_PHY_CMN_CLK_CFG1,
+				 0, 2, 0, NULL);
+	if (IS_ERR(hw))
+		return PTR_ERR(hw);
+
+	hws[num++] = hw;
+
+	snprintf(clk_name, 32, "dsi%dpll", pll_10nm->id);
+	snprintf(parent, 32, "dsi%d_pclk_mux", pll_10nm->id);
+
+	/* PIX CLK DIV : DIV_CTRL_7_4*/
+	hw = clk_hw_register_divider(dev, clk_name, parent,
+				     0, pll_10nm->phy_cmn_mmio +
+				     REG_DSI_10nm_PHY_CMN_CLK_CFG0,
+				     4, 4, CLK_DIVIDER_ONE_BASED,
+				     &pll_10nm->postdiv_lock);
+	if (IS_ERR(hw))
+		return PTR_ERR(hw);
+
+	hws[num++] = hw;
+	hw_data->hws[DSI_PIXEL_PLL_CLK] = hw;
+
+	pll_10nm->num_hws = num;
+
+	hw_data->num = NUM_PROVIDED_CLKS;
+	pll_10nm->hw_data = hw_data;
+
+	ret = of_clk_add_hw_provider(dev->of_node, of_clk_hw_onecell_get,
+				     pll_10nm->hw_data);
+	if (ret) {
+		dev_err(dev, "failed to register clk provider: %d\n", ret);
+		return ret;
+	}
+
 	return 0;
 }
 
@@ -172,5 +815,8 @@ struct msm_dsi_pll *msm_dsi_pll_10nm_init(struct platform_device *pdev, int id)
 		return ERR_PTR(ret);
 	}
 
+	/* TODO: Remove this when we have proper display handover support */
+	msm_dsi_pll_save_state(pll);
+
 	return pll;
 }
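
As a quick sanity check of the clock tree registered by pll_10nm_register() above, the provided byte and pixel clock rates can be derived by hand. The sketch below is not part of the patch; the VCO rate and divider/mux settings are assumed example values (out_div = 1, bit-clock divider = 2, the pclk mux selecting dsi0_pll_bit_clk, pixel divider = 6).

#include <stdio.h>

int main(void)
{
	/* Assumed example settings, not taken from the patch itself. */
	unsigned long vco = 1500000000UL;	/* dsi0vco_clk */
	unsigned long out_div = 1;		/* PLL_OUTDIV_RATE */
	unsigned long bit_div = 2;		/* CLK_CFG0 DIV_CTRL_3_0 */
	unsigned long pix_div = 6;		/* CLK_CFG0 DIV_CTRL_7_4 */

	unsigned long bit_clk = vco / out_div / bit_div;  /* dsi0_pll_bit_clk */
	unsigned long byte_clk = bit_clk / 8;             /* dsi0pllbyte */
	unsigned long pclk = bit_clk / pix_div;           /* dsi0pll */

	printf("bit_clk=%lu byte_clk=%lu pclk=%lu\n", bit_clk, byte_clk, pclk);
	return 0;
}

For a 1.5 GHz VCO this gives a 750 MHz bit clock, a 93.75 MHz byte clock and a 125 MHz pixel clock, consistent with the fixed /8 factor and the two CLK_CFG0 dividers shown in the clock diagram.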