2 * SPDX-License-Identifier: GPL-2.0
3 * Copyright (c) 2018, The Linux Foundation
7 #include <linux/clk-provider.h>
8 #include <linux/iopoll.h>
14 * DSI PLL 10nm - clock diagram (eg: DSI0):
16 * dsi0_pll_out_div_clk dsi0_pll_bit_clk
19 * +---------+ | +----------+ | +----+
20 * dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk
21 * +---------+ | +----------+ | +----+
23 * | | dsi0_pll_by_2_bit_clk
25 * | | +----+ | |\ dsi0_pclk_mux
26 * | |--| /2 |--o--| \ |
27 * | | +----+ | \ | +---------+
28 * | --------------| |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk
29 * |------------------------------| / +---------+
31 * -----------| /4? |--o----------|/
35 * dsi0_pll_post_out_div_clk
/* Fixed 19.2 MHz crystal (XO) reference driving the DSI PLL VCO */
38 #define VCO_REF_CLK_RATE 19200000
41 u32 pll_prop_gain_rate; /* loop proportional gain, band-dependent (see dsi_pll_calc_dec_frac()) */
43 u32 decimal_div_start; /* integer part of the fractional-N feedback divider */
44 u32 frac_div_start_low; /* fractional divider bits [7:0] */
45 u32 frac_div_start_mid; /* fractional divider bits [15:8] */
46 u32 frac_div_start_high; /* fractional divider bits [17:16] */
47 u32 pll_clock_inverters; /* value for the CLOCK_INVERTERS register */
49 u32 ssc_stepsize_high; /* SSC step size bits [15:8] (see dsi_pll_calc_ssc()) */
57 /* v3.0.0 10nm implementation that requires the old timings settings */
58 #define DSI_PHY_10NM_QUIRK_OLD_TIMINGS BIT(0)
60 struct dsi_pll_config {
/* skip the input prescaler when true; affects the feedback divider ratio — TODO confirm exact effect */
65 bool disable_prescaler;
78 struct pll_10nm_cached_state {
79 unsigned long vco_rate; /* last programmed VCO rate, reapplied on restore */
90 struct platform_device *pdev;
92 struct msm_dsi_phy *phy; /* owning PHY; provides base/pll_base iomem */
97 /* protects REG_DSI_10nm_PHY_CMN_CLK_CFG0 register */
98 spinlock_t postdiv_lock;
100 struct dsi_pll_config pll_configuration; /* rate-independent defaults (dsi_pll_setup_config()) */
101 struct dsi_pll_regs reg_setup; /* computed register values (dsi_pll_calc_dec_frac()/_ssc()) */
103 struct pll_10nm_cached_state cached_state; /* divider/mux state saved across PLL power-down */
105 struct dsi_pll_10nm *slave; /* non-NULL only in dual-DSI master mode */
/* Recover the wrapping dsi_pll_10nm from its embedded clk_hw */
108 #define to_pll_10nm(x) container_of(x, struct dsi_pll_10nm, clk_hw)
111 * Global list of private DSI PLL struct pointers. We need this for Dual DSI
112 * mode, where the master PLL's clk_ops needs to access the slave PLL's private data
114 static struct dsi_pll_10nm *pll_10nm_list[DSI_MAX];
/*
 * dsi_pll_setup_config() - load the rate-independent PLL defaults.
 *
 * Fills pll->pll_configuration with the fixed 10nm parameters: reference
 * frequency, decimal/fractional divider widths, lock timer and the SSC
 * (spread-spectrum clocking) settings. SSC is left disabled by default.
 */
116 static void dsi_pll_setup_config(struct dsi_pll_10nm *pll)
118 struct dsi_pll_config *config = &pll->pll_configuration;
120 config->ref_freq = pll->vco_ref_clk_rate;
121 config->output_div = 1;
122 config->dec_bits = 8;
/* 18-bit fraction => masks 0xff/0xff00/0x30000 in dsi_pll_calc_dec_frac() */
123 config->frac_bits = 18;
124 config->lock_timer = 64;
125 config->ssc_freq = 31500;
/* SSC offset is expressed in ppm (divided by 1000000 in dsi_pll_calc_ssc()) */
126 config->ssc_offset = 5000;
127 config->ssc_adj_per = 2;
128 config->thresh_cycles = 32;
129 config->refclk_cycles = 256;
131 config->div_override = false;
132 config->ignore_frac = false;
133 config->disable_prescaler = false;
/* SSC stays off unless explicitly enabled elsewhere */
135 config->enable_ssc = false;
136 config->ssc_center = 0;
/*
 * dsi_pll_calc_dec_frac() - derive feedback-divider programming for the
 * requested VCO rate.
 *
 * Splits vco_current_rate into an integer ("decimal") part and an 18-bit
 * fractional part, and picks the loop proportional gain and clock-inverter
 * values for the frequency band the rate falls in. Results are stored in
 * pll->reg_setup for dsi_pll_commit() to write to hardware.
 */
139 static void dsi_pll_calc_dec_frac(struct dsi_pll_10nm *pll)
141 struct dsi_pll_config *config = &pll->pll_configuration;
142 struct dsi_pll_regs *regs = &pll->reg_setup;
143 u64 fref = pll->vco_ref_clk_rate;
146 u64 dec, dec_multiple;
150 pll_freq = pll->vco_current_rate;
/* NOTE(review): divider presumably selected here as fref vs. 2*fref depending on the prescaler — confirm */
152 if (config->disable_prescaler)
/* scale by 2^frac_bits so one division yields both integer and fraction */
157 multiplier = 1 << config->frac_bits;
158 dec_multiple = div_u64(pll_freq * multiplier, divider);
159 dec = div_u64_rem(dec_multiple, multiplier, &frac);
/* proportional gain steps up with the VCO frequency band */
161 if (pll_freq <= 1900000000UL)
162 regs->pll_prop_gain_rate = 8;
163 else if (pll_freq <= 3000000000UL)
164 regs->pll_prop_gain_rate = 10;
166 regs->pll_prop_gain_rate = 12;
/* low-band rates need the clock inverters — rationale not visible here, TODO confirm */
167 if (pll_freq < 1100000000UL)
168 regs->pll_clock_inverters = 8;
170 regs->pll_clock_inverters = 0;
172 regs->pll_lockdet_rate = config->lock_timer;
173 regs->decimal_div_start = dec;
174 regs->frac_div_start_low = (frac & 0xff);
175 regs->frac_div_start_mid = (frac & 0xff00) >> 8;
/* the fraction is 18 bits wide, so only bits [17:16] remain here */
176 regs->frac_div_start_high = (frac & 0x30000) >> 16;
/* Bits of REG_DSI_10nm_PHY_PLL_SSC_CONTROL (see dsi_pll_ssc_commit()) */
179 #define SSC_CENTER BIT(0)
180 #define SSC_EN BIT(1)
/*
 * dsi_pll_calc_ssc() - compute spread-spectrum clocking register values.
 *
 * Derives the SSC modulation period (ssc_per) from the reference and SSC
 * frequencies, then scales the full fractional divider value by the ppm
 * offset to obtain the per-step deviation (ssc_step_size). No-op when SSC
 * is disabled in the configuration.
 */
182 static void dsi_pll_calc_ssc(struct dsi_pll_10nm *pll)
184 struct dsi_pll_config *config = &pll->pll_configuration;
185 struct dsi_pll_regs *regs = &pll->reg_setup;
191 if (!config->enable_ssc) {
192 DBG("SSC not enabled\n");
/* half period of the SSC modulation, in refclk cycles, minus one */
196 ssc_per = DIV_ROUND_CLOSEST(config->ref_freq, config->ssc_freq) / 2 - 1;
/* remainder used to align ssc_per to a multiple of (adj_per + 1) */
197 ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
/* rebuild the full 18-bit fraction from the three register slices */
200 frac = regs->frac_div_start_low |
201 (regs->frac_div_start_mid << 8) |
202 (regs->frac_div_start_high << 16);
/* step = (dec * 2^frac_bits + frac) * offset_ppm * (adj_per+1) / (per+1) / 1e6 */
203 ssc_step_size = regs->decimal_div_start;
204 ssc_step_size *= (1 << config->frac_bits);
205 ssc_step_size += frac;
206 ssc_step_size *= config->ssc_offset;
207 ssc_step_size *= (config->ssc_adj_per + 1);
208 ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
/* ssc_offset is in ppm, hence the final division by 1,000,000 */
209 ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);
211 regs->ssc_div_per_low = ssc_per & 0xFF;
212 regs->ssc_div_per_high = (ssc_per & 0xFF00) >> 8;
213 regs->ssc_stepsize_low = (u32)(ssc_step_size & 0xFF);
214 regs->ssc_stepsize_high = (u32)((ssc_step_size & 0xFF00) >> 8);
215 regs->ssc_adjper_low = config->ssc_adj_per & 0xFF;
216 regs->ssc_adjper_high = (config->ssc_adj_per & 0xFF00) >> 8;
218 regs->ssc_control = config->ssc_center ? SSC_CENTER : 0;
/* NOTE(review): "SCC" in the format string below looks like a typo for "SSC" */
220 pr_debug("SCC: Dec:%d, frac:%llu, frac_bits:%d\n",
221 regs->decimal_div_start, frac, config->frac_bits);
222 pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
223 ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
/*
 * dsi_pll_ssc_commit() - write the precomputed SSC values to the PLL.
 *
 * Programs step size, divider period, adjust period and control registers
 * from pll->reg_setup, then sets SSC_EN. Does nothing when SSC is disabled.
 */
226 static void dsi_pll_ssc_commit(struct dsi_pll_10nm *pll)
228 void __iomem *base = pll->phy->pll_base;
229 struct dsi_pll_regs *regs = &pll->reg_setup;
231 if (pll->pll_configuration.enable_ssc) {
232 pr_debug("SSC is enabled\n");
234 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_SSC_STEPSIZE_LOW_1,
235 regs->ssc_stepsize_low);
236 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_SSC_STEPSIZE_HIGH_1,
237 regs->ssc_stepsize_high);
238 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_PER_LOW_1,
239 regs->ssc_div_per_low);
240 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_PER_HIGH_1,
241 regs->ssc_div_per_high);
242 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_ADJPER_LOW_1,
243 regs->ssc_adjper_low);
244 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_SSC_DIV_ADJPER_HIGH_1,
245 regs->ssc_adjper_high);
/* enable SSC last, after all parameters are in place */
246 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_SSC_CONTROL,
247 SSC_EN | regs->ssc_control);
/*
 * dsi_pll_config_hzindep_reg() - program frequency-independent PLL settings.
 *
 * Writes the fixed analog-control, loop-filter and calibration values that
 * do not depend on the target VCO rate. The magic values are the vendor-
 * recommended defaults for the 10nm PLL.
 */
251 static void dsi_pll_config_hzindep_reg(struct dsi_pll_10nm *pll)
253 void __iomem *base = pll->phy->pll_base;
255 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_ANALOG_CONTROLS_ONE, 0x80);
256 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_ANALOG_CONTROLS_TWO, 0x03);
257 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_ANALOG_CONTROLS_THREE, 0x00);
258 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_DSM_DIVIDER, 0x00);
259 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_FEEDBACK_DIVIDER, 0x4e);
260 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_CALIBRATION_SETTINGS, 0x40);
261 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE,
263 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE, 0x0c);
264 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_OUTDIV, 0x00);
265 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_CORE_OVERRIDE, 0x00);
266 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO, 0x08);
267 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_PROP_GAIN_RATE_1, 0x08);
268 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_BAND_SET_RATE_1, 0xc0);
269 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1, 0xfa);
270 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1,
272 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_LOCK_OVERRIDE, 0x80);
273 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PFILT, 0x29);
274 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_IFILT, 0x3f);
/*
 * dsi_pll_commit() - write the rate-dependent divider values to the PLL.
 *
 * Pushes the decimal/fractional feedback divider, lock-detect rate and
 * clock-inverter values computed by dsi_pll_calc_dec_frac() into hardware.
 */
277 static void dsi_pll_commit(struct dsi_pll_10nm *pll)
279 void __iomem *base = pll->phy->pll_base;
280 struct dsi_pll_regs *reg = &pll->reg_setup;
282 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_CORE_INPUT_OVERRIDE, 0x12);
283 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_DECIMAL_DIV_START_1,
284 reg->decimal_div_start);
285 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_LOW_1,
286 reg->frac_div_start_low);
287 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_MID_1,
288 reg->frac_div_start_mid);
289 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_HIGH_1,
290 reg->frac_div_start_high);
291 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_LOCKDET_RATE_1,
292 reg->pll_lockdet_rate);
293 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_PLL_LOCK_DELAY, 0x06);
294 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_CMODE, 0x10);
295 dsi_phy_write(base + REG_DSI_10nm_PHY_PLL_CLOCK_INVERTERS,
296 reg->pll_clock_inverters);
/*
 * dsi_pll_10nm_vco_set_rate() - clk_ops .set_rate for the VCO.
 *
 * Records the requested rate, recomputes the divider/SSC programming and
 * commits it all to hardware in order: rate-dependent dividers, frequency-
 * independent settings, then SSC.
 */
299 static int dsi_pll_10nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
300 unsigned long parent_rate)
302 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(hw);
304 DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_10nm->id, rate,
/* cache the rate so vco_prepare()/restore_state() can re-program it */
307 pll_10nm->vco_current_rate = rate;
308 pll_10nm->vco_ref_clk_rate = VCO_REF_CLK_RATE;
310 dsi_pll_setup_config(pll_10nm);
312 dsi_pll_calc_dec_frac(pll_10nm);
314 dsi_pll_calc_ssc(pll_10nm);
316 dsi_pll_commit(pll_10nm);
318 dsi_pll_config_hzindep_reg(pll_10nm);
320 dsi_pll_ssc_commit(pll_10nm);
322 /* flush, ensure all register writes are done*/
/*
 * dsi_pll_10nm_lock_status() - poll for PLL lock.
 *
 * Polls bit 0 of COMMON_STATUS_ONE every 100us for up to 5ms.
 * Returns the readl_poll_timeout_atomic() result: 0 on lock,
 * negative errno on timeout (logged as an error).
 */
328 static int dsi_pll_10nm_lock_status(struct dsi_pll_10nm *pll)
330 struct device *dev = &pll->pdev->dev;
333 u32 const delay_us = 100;
334 u32 const timeout_us = 5000;
336 rc = readl_poll_timeout_atomic(pll->phy->pll_base +
337 REG_DSI_10nm_PHY_PLL_COMMON_STATUS_ONE,
339 ((status & BIT(0)) > 0),
343 DRM_DEV_ERROR(dev, "DSI PLL(%d) lock failed, status=0x%08x\n",
/*
 * dsi_pll_disable_pll_bias() - power down the PLL analog bias.
 *
 * Clears SYSTEM_MUXES and does a read-modify-write of CMN_CTRL_0
 * (the modified bit value is applied in the elided write argument —
 * presumably clearing the PLL enable/bias bit; TODO confirm).
 */
349 static void dsi_pll_disable_pll_bias(struct dsi_pll_10nm *pll)
351 u32 data = dsi_phy_read(pll->phy->base + REG_DSI_10nm_PHY_CMN_CTRL_0);
353 dsi_phy_write(pll->phy->pll_base + REG_DSI_10nm_PHY_PLL_SYSTEM_MUXES, 0);
354 dsi_phy_write(pll->phy->base + REG_DSI_10nm_PHY_CMN_CTRL_0,
/*
 * dsi_pll_enable_pll_bias() - power up the PLL analog bias.
 *
 * Read-modify-write of CMN_CTRL_0 (bias enable bit — TODO confirm),
 * then routes the PLL via SYSTEM_MUXES = 0xc0.
 */
359 static void dsi_pll_enable_pll_bias(struct dsi_pll_10nm *pll)
361 u32 data = dsi_phy_read(pll->phy->base + REG_DSI_10nm_PHY_CMN_CTRL_0);
363 dsi_phy_write(pll->phy->base + REG_DSI_10nm_PHY_CMN_CTRL_0,
365 dsi_phy_write(pll->phy->pll_base + REG_DSI_10nm_PHY_PLL_SYSTEM_MUXES, 0xc0);
/*
 * dsi_pll_disable_global_clk() - gate the PHY output clocks.
 * Read-modify-write of CMN_CLK_CFG1 (clears the clk-enable bit —
 * value applied in the elided write argument; TODO confirm bit).
 */
369 static void dsi_pll_disable_global_clk(struct dsi_pll_10nm *pll)
373 data = dsi_phy_read(pll->phy->base + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
374 dsi_phy_write(pll->phy->base + REG_DSI_10nm_PHY_CMN_CLK_CFG1,
/*
 * dsi_pll_enable_global_clk() - ungate the PHY output clocks.
 * Read-modify-write of CMN_CLK_CFG1 (sets the clk-enable bit —
 * value applied in the elided write argument; TODO confirm bit).
 */
378 static void dsi_pll_enable_global_clk(struct dsi_pll_10nm *pll)
382 data = dsi_phy_read(pll->phy->base + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
383 dsi_phy_write(pll->phy->base + REG_DSI_10nm_PHY_CMN_CLK_CFG1,
/*
 * dsi_pll_10nm_vco_prepare() - clk_ops .prepare: power up and lock the PLL.
 *
 * Enables the PLL bias (also on the slave in dual-DSI mode), re-programs
 * the cached VCO rate (PLL registers do not survive power-down), starts
 * the PLL via CMN_PLL_CNTRL, waits for lock, then enables the global
 * output clocks and the resync buffer (RBUF) on master and slave.
 * Returns 0 on success or a negative errno from set_rate/lock polling.
 */
387 static int dsi_pll_10nm_vco_prepare(struct clk_hw *hw)
389 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(hw);
390 struct device *dev = &pll_10nm->pdev->dev;
393 dsi_pll_enable_pll_bias(pll_10nm);
395 dsi_pll_enable_pll_bias(pll_10nm->slave);
/* rewrite the rate: hardware state was lost while the PLL was off */
397 rc = dsi_pll_10nm_vco_set_rate(hw,pll_10nm->vco_current_rate, 0);
399 DRM_DEV_ERROR(dev, "vco_set_rate failed, rc=%d\n", rc);
404 dsi_phy_write(pll_10nm->phy->base + REG_DSI_10nm_PHY_CMN_PLL_CNTRL,
408 * ensure all PLL configurations are written prior to checking
413 /* Check for PLL lock */
414 rc = dsi_pll_10nm_lock_status(pll_10nm);
416 DRM_DEV_ERROR(dev, "PLL(%d) lock failed\n", pll_10nm->id);
420 pll_10nm->phy->pll_on = true;
422 dsi_pll_enable_global_clk(pll_10nm);
424 dsi_pll_enable_global_clk(pll_10nm->slave);
/* enable the resync FIFO buffer on the master (and slave below) */
426 dsi_phy_write(pll_10nm->phy->base + REG_DSI_10nm_PHY_CMN_RBUF_CTRL,
429 dsi_phy_write(pll_10nm->slave->phy->base +
430 REG_DSI_10nm_PHY_CMN_RBUF_CTRL, 0x01);
/* Disable one PLL instance: turn off the resync buffer, then its bias. */
436 static void dsi_pll_disable_sub(struct dsi_pll_10nm *pll)
438 dsi_phy_write(pll->phy->base + REG_DSI_10nm_PHY_CMN_RBUF_CTRL, 0);
439 dsi_pll_disable_pll_bias(pll);
/*
 * dsi_pll_10nm_vco_unprepare() - clk_ops .unprepare: power down the PLL.
 *
 * Gates the output clocks before stopping the PLL to avoid glitches,
 * then powers down the master (and slave, in dual-DSI mode).
 */
442 static void dsi_pll_10nm_vco_unprepare(struct clk_hw *hw)
444 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(hw);
447 * To avoid any stray glitches while abruptly powering down the PLL
448 * make sure to gate the clock using the clock enable bit before
449 * powering down the PLL
451 dsi_pll_disable_global_clk(pll_10nm);
452 dsi_phy_write(pll_10nm->phy->base + REG_DSI_10nm_PHY_CMN_PLL_CNTRL, 0);
453 dsi_pll_disable_sub(pll_10nm);
454 if (pll_10nm->slave) {
455 dsi_pll_disable_global_clk(pll_10nm->slave);
456 dsi_pll_disable_sub(pll_10nm->slave);
458 /* flush, ensure all register writes are done */
460 pll_10nm->phy->pll_on = false;
/*
 * dsi_pll_10nm_vco_recalc_rate() - clk_ops .recalc_rate for the VCO.
 *
 * Reads the decimal and 18-bit fractional divider values back from
 * hardware and reconstructs the VCO rate as
 *   (dec + frac / 2^frac_bits) * 2 * ref_clk.
 */
463 static unsigned long dsi_pll_10nm_vco_recalc_rate(struct clk_hw *hw,
464 unsigned long parent_rate)
466 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(hw);
467 struct dsi_pll_config *config = &pll_10nm->pll_configuration;
468 void __iomem *base = pll_10nm->phy->pll_base;
469 u64 ref_clk = pll_10nm->vco_ref_clk_rate;
476 dec = dsi_phy_read(base + REG_DSI_10nm_PHY_PLL_DECIMAL_DIV_START_1);
/* reassemble the 18-bit fraction from its three register slices */
479 frac = dsi_phy_read(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_LOW_1);
480 frac |= ((dsi_phy_read(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_MID_1) &
482 frac |= ((dsi_phy_read(base + REG_DSI_10nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
487 * 1. Assumes prescaler is disabled
489 multiplier = 1 << config->frac_bits;
490 pll_freq = dec * (ref_clk * 2);
491 tmp64 = (ref_clk * 2 * frac);
492 pll_freq += div_u64(tmp64, multiplier);
496 DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
497 pll_10nm->id, (unsigned long)vco_rate, dec, frac);
499 return (unsigned long)vco_rate;
/*
 * dsi_pll_10nm_clk_round_rate() - clk_ops .round_rate for the VCO.
 * Clamps the requested rate to the PHY config's [min_pll_rate,
 * max_pll_rate] range; rates inside the range pass through unchanged.
 */
502 static long dsi_pll_10nm_clk_round_rate(struct clk_hw *hw,
503 unsigned long rate, unsigned long *parent_rate)
505 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(hw);
507 if (rate < pll_10nm->phy->cfg->min_pll_rate)
508 return pll_10nm->phy->cfg->min_pll_rate;
509 else if (rate > pll_10nm->phy->cfg->max_pll_rate)
510 return pll_10nm->phy->cfg->max_pll_rate;
/* clk_ops for the dsi%dvco_clk registered in pll_10nm_register() */
515 static const struct clk_ops clk_ops_dsi_pll_10nm_vco = {
516 .round_rate = dsi_pll_10nm_clk_round_rate,
517 .set_rate = dsi_pll_10nm_vco_set_rate,
518 .recalc_rate = dsi_pll_10nm_vco_recalc_rate,
519 .prepare = dsi_pll_10nm_vco_prepare,
520 .unprepare = dsi_pll_10nm_vco_unprepare,
/*
 * dsi_10nm_pll_save_state() - cache divider/mux state before power-down.
 *
 * Saves the 2-bit PLL output divider, the bit-clock divider (CLK_CFG0
 * bits [3:0]), the pixel-clock divider (CLK_CFG0 bits [7:4]) and the
 * 2-bit pclk mux select (CLK_CFG1) so dsi_10nm_pll_restore_state() can
 * reapply them.
 */
527 static void dsi_10nm_pll_save_state(struct msm_dsi_phy *phy)
529 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(phy->vco_hw);
530 struct pll_10nm_cached_state *cached = &pll_10nm->cached_state;
531 void __iomem *phy_base = pll_10nm->phy->base;
532 u32 cmn_clk_cfg0, cmn_clk_cfg1;
534 cached->pll_out_div = dsi_phy_read(pll_10nm->phy->pll_base +
535 REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE);
536 cached->pll_out_div &= 0x3;
538 cmn_clk_cfg0 = dsi_phy_read(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG0);
539 cached->bit_clk_div = cmn_clk_cfg0 & 0xf;
540 cached->pix_clk_div = (cmn_clk_cfg0 & 0xf0) >> 4;
542 cmn_clk_cfg1 = dsi_phy_read(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
543 cached->pll_mux = cmn_clk_cfg1 & 0x3;
545 DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
546 pll_10nm->id, cached->pll_out_div, cached->bit_clk_div,
547 cached->pix_clk_div, cached->pll_mux);
/*
 * dsi_10nm_pll_restore_state() - reapply the state cached by
 * dsi_10nm_pll_save_state() after a PLL power-down.
 *
 * Read-modify-writes the output divider and pclk mux fields, rewrites
 * the bit/pixel clock dividers, then re-programs the cached VCO rate.
 * Returns 0 on success or the error from dsi_pll_10nm_vco_set_rate().
 */
550 static int dsi_10nm_pll_restore_state(struct msm_dsi_phy *phy)
552 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(phy->vco_hw);
553 struct pll_10nm_cached_state *cached = &pll_10nm->cached_state;
554 void __iomem *phy_base = pll_10nm->phy->base;
558 val = dsi_phy_read(pll_10nm->phy->pll_base + REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE);
560 val |= cached->pll_out_div;
561 dsi_phy_write(pll_10nm->phy->pll_base + REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE, val);
/* bit-clk divider in bits [3:0], pixel-clk divider in bits [7:4] */
563 dsi_phy_write(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG0,
564 cached->bit_clk_div | (cached->pix_clk_div << 4));
566 val = dsi_phy_read(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG1);
568 val |= cached->pll_mux;
569 dsi_phy_write(phy_base + REG_DSI_10nm_PHY_CMN_CLK_CFG1, val);
/* re-program the VCO: divider registers alone do not restore the rate */
571 ret = dsi_pll_10nm_vco_set_rate(phy->vco_hw,
572 pll_10nm->vco_current_rate,
573 pll_10nm->vco_ref_clk_rate);
575 DRM_DEV_ERROR(&pll_10nm->pdev->dev,
576 "restore vco rate failed. ret=%d\n", ret);
580 DBG("DSI PLL%d", pll_10nm->id);
/*
 * dsi_10nm_set_usecase() - configure standalone / master / slave mode.
 *
 * In master mode, links this PLL to its dual-DSI partner via
 * pll_10nm_list. In slave mode, selects the external (other PHY's) PLL
 * as byte/pixel clock source by writing 0x1 << 2 into CMN_CLK_CFG1.
 */
585 static int dsi_10nm_set_usecase(struct msm_dsi_phy *phy)
587 struct dsi_pll_10nm *pll_10nm = to_pll_10nm(phy->vco_hw);
588 void __iomem *base = phy->base;
589 u32 data = 0x0; /* internal PLL */
591 DBG("DSI PLL%d", pll_10nm->id);
593 switch (phy->usecase) {
594 case MSM_DSI_PHY_STANDALONE:
596 case MSM_DSI_PHY_MASTER:
/* the partner PLL is simply the other entry in the global list */
597 pll_10nm->slave = pll_10nm_list[(pll_10nm->id + 1) % DSI_MAX];
599 case MSM_DSI_PHY_SLAVE:
600 data = 0x1; /* external PLL */
/* set clock source selection in bit 2 of CLK_CFG1 */
607 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_CLK_CFG1, (data << 2));
613 * The post dividers and mux clocks are created using the standard divider and
614 * mux API. Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
615 * state to follow the master PLL's divider/mux state. Therefore, we don't
616 * require special clock ops that also configure the slave PLL registers
/*
 * pll_10nm_register() - build the clock tree shown in the diagram at the
 * top of this file and hand the byte/pixel clocks to the provider.
 *
 * Registers, in order: the VCO, the power-of-two output divider, the
 * bit-clock divider, the fixed /8 byte clock, the /2 bit clock, the post
 * output divider, the 4-input pclk mux and the pixel-clock divider.
 * All clocks are devm-managed. Returns 0 on success or a registration
 * error (handled in the elided error paths).
 */
618 static int pll_10nm_register(struct dsi_pll_10nm *pll_10nm, struct clk_hw **provided_clocks)
620 char clk_name[32], parent[32], vco_name[32];
621 char parent2[32], parent3[32], parent4[32];
622 struct clk_init_data vco_init = {
623 .parent_names = (const char *[]){ "xo" },
626 .flags = CLK_IGNORE_UNUSED,
627 .ops = &clk_ops_dsi_pll_10nm_vco,
629 struct device *dev = &pll_10nm->pdev->dev;
633 DBG("DSI%d", pll_10nm->id);
635 snprintf(vco_name, 32, "dsi%dvco_clk", pll_10nm->id);
636 pll_10nm->clk_hw.init = &vco_init;
638 ret = devm_clk_hw_register(dev, &pll_10nm->clk_hw);
642 snprintf(clk_name, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
643 snprintf(parent, 32, "dsi%dvco_clk", pll_10nm->id);
/* 2-bit power-of-two divider in PLL_OUTDIV_RATE */
645 hw = devm_clk_hw_register_divider(dev, clk_name,
646 parent, CLK_SET_RATE_PARENT,
647 pll_10nm->phy->pll_base +
648 REG_DSI_10nm_PHY_PLL_PLL_OUTDIV_RATE,
649 0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
655 snprintf(clk_name, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
656 snprintf(parent, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
658 /* BIT CLK: DIV_CTRL_3_0 */
659 hw = devm_clk_hw_register_divider(dev, clk_name, parent,
661 pll_10nm->phy->base +
662 REG_DSI_10nm_PHY_CMN_CLK_CFG0,
663 0, 4, CLK_DIVIDER_ONE_BASED,
664 &pll_10nm->postdiv_lock);
670 snprintf(clk_name, 32, "dsi%d_phy_pll_out_byteclk", pll_10nm->id);
671 snprintf(parent, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
673 /* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
674 hw = devm_clk_hw_register_fixed_factor(dev, clk_name, parent,
675 CLK_SET_RATE_PARENT, 1, 8);
681 provided_clocks[DSI_BYTE_PLL_CLK] = hw;
683 snprintf(clk_name, 32, "dsi%d_pll_by_2_bit_clk", pll_10nm->id);
684 snprintf(parent, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
686 hw = devm_clk_hw_register_fixed_factor(dev, clk_name, parent,
693 snprintf(clk_name, 32, "dsi%d_pll_post_out_div_clk", pll_10nm->id);
694 snprintf(parent, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
696 hw = devm_clk_hw_register_fixed_factor(dev, clk_name, parent,
/* pclk mux: bit / bit÷2 / out_div / post_out_div (see diagram above) */
703 snprintf(clk_name, 32, "dsi%d_pclk_mux", pll_10nm->id);
704 snprintf(parent, 32, "dsi%d_pll_bit_clk", pll_10nm->id);
705 snprintf(parent2, 32, "dsi%d_pll_by_2_bit_clk", pll_10nm->id);
706 snprintf(parent3, 32, "dsi%d_pll_out_div_clk", pll_10nm->id);
707 snprintf(parent4, 32, "dsi%d_pll_post_out_div_clk", pll_10nm->id);
709 hw = devm_clk_hw_register_mux(dev, clk_name,
711 parent, parent2, parent3, parent4
712 }), 4, 0, pll_10nm->phy->base +
713 REG_DSI_10nm_PHY_CMN_CLK_CFG1,
720 snprintf(clk_name, 32, "dsi%d_phy_pll_out_dsiclk", pll_10nm->id);
721 snprintf(parent, 32, "dsi%d_pclk_mux", pll_10nm->id);
723 /* PIX CLK DIV : DIV_CTRL_7_4*/
724 hw = devm_clk_hw_register_divider(dev, clk_name, parent,
725 0, pll_10nm->phy->base +
726 REG_DSI_10nm_PHY_CMN_CLK_CFG0,
727 4, 4, CLK_DIVIDER_ONE_BASED,
728 &pll_10nm->postdiv_lock);
734 provided_clocks[DSI_PIXEL_PLL_CLK] = hw;
/*
 * dsi_pll_10nm_init() - allocate and register a 10nm PLL for one PHY.
 *
 * Allocates the devm-managed dsi_pll_10nm, records it in the global
 * pll_10nm_list (needed for dual-DSI master/slave lookup), registers
 * the clock tree and exposes the VCO hw to the PHY core.
 */
743 static int dsi_pll_10nm_init(struct msm_dsi_phy *phy)
745 struct platform_device *pdev = phy->pdev;
747 struct dsi_pll_10nm *pll_10nm;
750 pll_10nm = devm_kzalloc(&pdev->dev, sizeof(*pll_10nm), GFP_KERNEL);
754 DBG("DSI PLL%d", id);
756 pll_10nm->pdev = pdev;
758 pll_10nm_list[id] = pll_10nm;
760 spin_lock_init(&pll_10nm->postdiv_lock);
764 ret = pll_10nm_register(pll_10nm, phy->provided_clocks->hws);
766 DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
770 phy->vco_hw = &pll_10nm->clk_hw;
772 /* TODO: Remove this when we have proper display handover support */
773 msm_dsi_phy_pll_save_state(phy);
/* Return non-zero when bit 0 of CMN_PLL_CNTRL indicates the PLL is running. */
778 static int dsi_phy_hw_v3_0_is_pll_on(struct msm_dsi_phy *phy)
780 void __iomem *base = phy->base;
783 data = dsi_phy_read(base + REG_DSI_10nm_PHY_CMN_PLL_CNTRL);
784 mb(); /* make sure read happened */
786 return (data & BIT(0));
/*
 * dsi_phy_hw_v3_0_config_lpcdrx() - toggle LP-RX/CD-RX on logical lane 0.
 *
 * @enable: true writes 0x3 to the lane's LPRX_CTRL, false clears it.
 * Only the physical lane mapped to logical data lane 0 needs these
 * receivers (currently hard-wired to lane 0 pending lane-swap support).
 */
789 static void dsi_phy_hw_v3_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
791 void __iomem *lane_base = phy->lane_base;
792 int phy_lane_0 = 0; /* TODO: Support all lane swap configs */
795 * LPRX and CDRX need to enabled only for physical data lane
796 * corresponding to the logical data lane 0
799 dsi_phy_write(lane_base +
800 REG_DSI_10nm_PHY_LN_LPRX_CTRL(phy_lane_0), 0x3);
802 dsi_phy_write(lane_base +
803 REG_DSI_10nm_PHY_LN_LPRX_CTRL(phy_lane_0), 0);
/*
 * dsi_phy_hw_v3_0_lane_settings() - program per-lane PHY registers.
 *
 * Sets strength control, disables LP-RX/CD-RX on all five lanes (then
 * re-enables them only on logical lane 0 via config_lpcdrx()), clears
 * the lane config/offset registers, and writes the tx_dctrl values.
 * Lane 4 (index 4) is the clock lane and gets CFG3 = 0x80.
 * The OLD_TIMINGS quirk skips the freeze-I/O toggle at the end.
 */
806 static void dsi_phy_hw_v3_0_lane_settings(struct msm_dsi_phy *phy)
809 u8 tx_dctrl[] = { 0x00, 0x00, 0x00, 0x04, 0x01 };
810 void __iomem *lane_base = phy->lane_base;
812 if (phy->cfg->quirks & DSI_PHY_10NM_QUIRK_OLD_TIMINGS)
815 /* Strength ctrl settings */
816 for (i = 0; i < 5; i++) {
817 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_LPTX_STR_CTRL(i),
820 * Disable LPRX and CDRX for all lanes. And later on, it will
821 * be only enabled for the physical data lane corresponding
822 * to the logical data lane 0
824 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_LPRX_CTRL(i), 0);
825 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_PIN_SWAP(i), 0x0);
826 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_HSTX_STR_CTRL(i),
830 dsi_phy_hw_v3_0_config_lpcdrx(phy, true);
833 for (i = 0; i < 5; i++) {
834 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_CFG0(i), 0x0);
835 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_CFG1(i), 0x0);
836 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_CFG2(i), 0x0);
/* lane 4 is special-cased: presumably the clock lane — TODO confirm */
837 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_CFG3(i),
838 i == 4 ? 0x80 : 0x0);
839 dsi_phy_write(lane_base +
840 REG_DSI_10nm_PHY_LN_OFFSET_TOP_CTRL(i), 0x0);
841 dsi_phy_write(lane_base +
842 REG_DSI_10nm_PHY_LN_OFFSET_BOT_CTRL(i), 0x0);
843 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_TX_DCTRL(i),
847 if (!(phy->cfg->quirks & DSI_PHY_10NM_QUIRK_OLD_TIMINGS)) {
848 /* Toggle BIT 0 to release freeze I/0 */
849 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_TX_DCTRL(3), 0x05);
850 dsi_phy_write(lane_base + REG_DSI_10nm_PHY_LN_TX_DCTRL(3), 0x04);
/*
 * dsi_10nm_phy_enable() - full PHY bring-up sequence.
 *
 * Computes D-PHY timings for the requested clock, waits for the REFGEN
 * to become ready (5us poll, 1ms timeout), releases power-downs, resets
 * the PLL and resync FIFO, programs lane swap and the eleven timing
 * registers, powers up all blocks/lanes, selects full-rate mode, sets
 * the PLL usecase and finally applies the per-lane settings.
 * Returns 0 on success or a negative errno (timing calc, REFGEN
 * timeout, or usecase failure).
 */
854 static int dsi_10nm_phy_enable(struct msm_dsi_phy *phy, int src_pll_id,
855 struct msm_dsi_phy_clk_request *clk_req)
859 u32 const delay_us = 5;
860 u32 const timeout_us = 1000;
861 struct msm_dsi_dphy_timing *timing = &phy->timing;
862 void __iomem *base = phy->base;
867 if (msm_dsi_dphy_timing_calc_v3(timing, clk_req)) {
868 DRM_DEV_ERROR(&phy->pdev->dev,
869 "%s: D-PHY timing calculation failed\n", __func__);
873 if (dsi_phy_hw_v3_0_is_pll_on(phy))
874 pr_warn("PLL turned on before configuring PHY\n");
876 /* wait for REFGEN READY */
877 ret = readl_poll_timeout_atomic(base + REG_DSI_10nm_PHY_CMN_PHY_STATUS,
878 status, (status & BIT(0)),
879 delay_us, timeout_us);
881 pr_err("Ref gen not ready. Aborting\n");
885 /* de-assert digital and pll power down */
886 data = BIT(6) | BIT(5);
887 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_CTRL_0, data);
889 /* Assert PLL core reset */
890 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_PLL_CNTRL, 0x00);
892 /* turn off resync FIFO */
893 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_RBUF_CTRL, 0x00);
895 /* Select MS1 byte-clk */
896 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_GLBL_CTRL, 0x10);
899 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_VREG_CTRL, 0x59);
901 /* Configure PHY lane swap (TODO: we need to calculate this) */
902 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_LANE_CFG0, 0x21);
903 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_LANE_CFG1, 0x84);
905 /* DSI PHY timings */
906 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_0,
907 timing->hs_halfbyte_en);
908 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_1,
910 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_2,
911 timing->clk_prepare);
912 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_3,
914 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_4,
916 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_5,
918 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_6,
920 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_7,
922 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_8,
924 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_9,
925 timing->ta_go | (timing->ta_sure << 3));
926 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_10,
928 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_TIMING_CTRL_11,
931 /* Remove power down from all blocks */
932 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_CTRL_0, 0x7f);
935 data = dsi_phy_read(base + REG_DSI_10nm_PHY_CMN_CTRL_0);
937 /* TODO: only power up lanes that are used */
939 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_CTRL_0, data);
940 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_LANE_CTRL0, 0x1F);
942 /* Select full-rate mode */
943 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_CTRL_2, 0x40);
945 ret = dsi_10nm_set_usecase(phy);
947 DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n",
952 /* DSI lane settings */
953 dsi_phy_hw_v3_0_lane_settings(phy);
955 DBG("DSI%d PHY enabled", phy->id);
/*
 * dsi_10nm_phy_disable() - power down the PHY.
 *
 * Warns if the PLL is still running, disables LP-RX/CD-RX, drops the
 * lane enables from CMN_CTRL_0, clears LANE_CTRL0 and finally turns
 * off every PHY block by zeroing CMN_CTRL_0.
 */
960 static void dsi_10nm_phy_disable(struct msm_dsi_phy *phy)
962 void __iomem *base = phy->base;
967 if (dsi_phy_hw_v3_0_is_pll_on(phy))
968 pr_warn("Turning OFF PHY while PLL is on\n");
970 dsi_phy_hw_v3_0_config_lpcdrx(phy, false);
971 data = dsi_phy_read(base + REG_DSI_10nm_PHY_CMN_CTRL_0);
973 /* disable all lanes */
975 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_CTRL_0, data);
976 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_LANE_CTRL0, 0);
978 /* Turn off all PHY blocks */
979 dsi_phy_write(base + REG_DSI_10nm_PHY_CMN_CTRL_0, 0x00);
980 /* make sure phy is turned off */
983 DBG("DSI%d PHY disabled", phy->id);
/* 10nm PHY configuration (e.g. SDM845-class SoCs, per the io_start addresses) */
986 const struct msm_dsi_phy_cfg dsi_phy_10nm_cfgs = {
987 .src_pll_truthtable = { {false, false}, {true, false} },
988 .has_phy_lane = true,
996 .enable = dsi_10nm_phy_enable,
997 .disable = dsi_10nm_phy_disable,
998 .pll_init = dsi_pll_10nm_init,
999 .save_pll_state = dsi_10nm_pll_save_state,
1000 .restore_pll_state = dsi_10nm_pll_restore_state,
/* VCO limits enforced by dsi_pll_10nm_clk_round_rate() */
1002 .min_pll_rate = 1000000000UL,
1003 .max_pll_rate = 3500000000UL,
1004 .io_start = { 0xae94400, 0xae96400 },
1008 const struct msm_dsi_phy_cfg dsi_phy_10nm_8998_cfgs = {
1009 .src_pll_truthtable = { {false, false}, {true, false} },
1010 .has_phy_lane = true,
1014 {"vdds", 36000, 32},
1018 .enable = dsi_10nm_phy_enable,
1019 .disable = dsi_10nm_phy_disable,
1020 .pll_init = dsi_pll_10nm_init,
1021 .save_pll_state = dsi_10nm_pll_save_state,
1022 .restore_pll_state = dsi_10nm_pll_restore_state,
1024 .min_pll_rate = 1000000000UL,
1025 .max_pll_rate = 3500000000UL,
1026 .io_start = { 0xc994400, 0xc996400 },
1028 .quirks = DSI_PHY_10NM_QUIRK_OLD_TIMINGS,