// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
 */
#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/rational.h>
#include <linux/regmap.h>
#include <linux/math64.h>
#include <linux/slab.h>

#include <asm/div64.h>
24 #define CMD_UPDATE BIT(0)
25 #define CMD_ROOT_EN BIT(1)
26 #define CMD_DIRTY_CFG BIT(4)
27 #define CMD_DIRTY_N BIT(5)
28 #define CMD_DIRTY_M BIT(6)
29 #define CMD_DIRTY_D BIT(7)
30 #define CMD_ROOT_OFF BIT(31)
33 #define CFG_SRC_DIV_SHIFT 0
34 #define CFG_SRC_SEL_SHIFT 8
35 #define CFG_SRC_SEL_MASK (0x7 << CFG_SRC_SEL_SHIFT)
36 #define CFG_MODE_SHIFT 12
37 #define CFG_MODE_MASK (0x3 << CFG_MODE_SHIFT)
38 #define CFG_MODE_DUAL_EDGE (0x2 << CFG_MODE_SHIFT)
39 #define CFG_HW_CLK_CTRL_MASK BIT(20)
45 #define RCG_CFG_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
46 #define RCG_M_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
47 #define RCG_N_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
48 #define RCG_D_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)
50 /* Dynamic Frequency Scaling */
51 #define MAX_PERF_LEVEL 8
52 #define SE_CMD_DFSR_OFFSET 0x14
53 #define SE_CMD_DFS_EN BIT(0)
54 #define SE_PERF_DFSR(level) (0x1c + 0x4 * (level))
55 #define SE_PERF_M_DFSR(level) (0x5c + 0x4 * (level))
56 #define SE_PERF_N_DFSR(level) (0x9c + 0x4 * (level))
63 static int clk_rcg2_is_enabled(struct clk_hw *hw)
65 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
69 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
73 return (cmd & CMD_ROOT_OFF) == 0;
76 static u8 clk_rcg2_get_parent(struct clk_hw *hw)
78 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
79 int num_parents = clk_hw_get_num_parents(hw);
83 ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
87 cfg &= CFG_SRC_SEL_MASK;
88 cfg >>= CFG_SRC_SEL_SHIFT;
90 for (i = 0; i < num_parents; i++)
91 if (cfg == rcg->parent_map[i].cfg)
95 pr_debug("%s: Clock %s has invalid parent, using default.\n",
96 __func__, clk_hw_get_name(hw));
100 static int update_config(struct clk_rcg2 *rcg)
104 struct clk_hw *hw = &rcg->clkr.hw;
105 const char *name = clk_hw_get_name(hw);
107 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
108 CMD_UPDATE, CMD_UPDATE);
112 /* Wait for update to take effect */
113 for (count = 500; count > 0; count--) {
114 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
117 if (!(cmd & CMD_UPDATE))
122 WARN(1, "%s: rcg didn't update its configuration.", name);
126 static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
128 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
130 u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
132 ret = regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
133 CFG_SRC_SEL_MASK, cfg);
137 return update_config(rcg);
141 * Calculate m/n:d rate
144 * rate = ----------- x ---
148 calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
166 clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
168 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
169 u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;
171 regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
173 if (rcg->mnd_width) {
174 mask = BIT(rcg->mnd_width) - 1;
175 regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
177 regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &n);
181 mode = cfg & CFG_MODE_MASK;
182 mode >>= CFG_MODE_SHIFT;
185 mask = BIT(rcg->hid_width) - 1;
186 hid_div = cfg >> CFG_SRC_DIV_SHIFT;
189 return calc_rate(parent_rate, m, n, mode, hid_div);
192 static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
193 struct clk_rate_request *req,
194 enum freq_policy policy)
196 unsigned long clk_flags, rate = req->rate;
198 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
203 f = qcom_find_freq_floor(f, rate);
206 f = qcom_find_freq(f, rate);
215 index = qcom_find_src_index(hw, rcg->parent_map, f->src);
219 clk_flags = clk_hw_get_flags(hw);
220 p = clk_hw_get_parent_by_index(hw, index);
224 if (clk_flags & CLK_SET_RATE_PARENT) {
230 rate *= f->pre_div + 1;
240 rate = clk_hw_get_rate(p);
242 req->best_parent_hw = p;
243 req->best_parent_rate = rate;
249 static int clk_rcg2_determine_rate(struct clk_hw *hw,
250 struct clk_rate_request *req)
252 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
254 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
257 static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
258 struct clk_rate_request *req)
260 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
262 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
265 static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
267 u32 cfg, mask, d_val, not2d_val, n_minus_m;
268 struct clk_hw *hw = &rcg->clkr.hw;
269 int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);
274 if (rcg->mnd_width && f->n) {
275 mask = BIT(rcg->mnd_width) - 1;
276 ret = regmap_update_bits(rcg->clkr.regmap,
277 RCG_M_OFFSET(rcg), mask, f->m);
281 ret = regmap_update_bits(rcg->clkr.regmap,
282 RCG_N_OFFSET(rcg), mask, ~(f->n - f->m));
286 /* Calculate 2d value */
289 n_minus_m = f->n - f->m;
292 d_val = clamp_t(u32, d_val, f->m, n_minus_m);
293 not2d_val = ~d_val & mask;
295 ret = regmap_update_bits(rcg->clkr.regmap,
296 RCG_D_OFFSET(rcg), mask, not2d_val);
301 mask = BIT(rcg->hid_width) - 1;
302 mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
303 cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
304 cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
305 if (rcg->mnd_width && f->n && (f->m != f->n))
306 cfg |= CFG_MODE_DUAL_EDGE;
307 return regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
311 static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
315 ret = __clk_rcg2_configure(rcg, f);
319 return update_config(rcg);
322 static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
323 enum freq_policy policy)
325 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
326 const struct freq_tbl *f;
330 f = qcom_find_freq_floor(rcg->freq_tbl, rate);
333 f = qcom_find_freq(rcg->freq_tbl, rate);
342 return clk_rcg2_configure(rcg, f);
345 static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
346 unsigned long parent_rate)
348 return __clk_rcg2_set_rate(hw, rate, CEIL);
351 static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
352 unsigned long parent_rate)
354 return __clk_rcg2_set_rate(hw, rate, FLOOR);
357 static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
358 unsigned long rate, unsigned long parent_rate, u8 index)
360 return __clk_rcg2_set_rate(hw, rate, CEIL);
363 static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
364 unsigned long rate, unsigned long parent_rate, u8 index)
366 return __clk_rcg2_set_rate(hw, rate, FLOOR);
369 static int clk_rcg2_get_duty_cycle(struct clk_hw *hw, struct clk_duty *duty)
371 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
372 u32 notn_m, n, m, d, not2d, mask;
374 if (!rcg->mnd_width) {
375 /* 50 % duty-cycle for Non-MND RCGs */
381 regmap_read(rcg->clkr.regmap, RCG_D_OFFSET(rcg), ¬2d);
382 regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
383 regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), ¬n_m);
385 if (!not2d && !m && !notn_m) {
386 /* 50 % duty-cycle always */
392 mask = BIT(rcg->mnd_width) - 1;
395 d = DIV_ROUND_CLOSEST(d, 2);
397 n = (~(notn_m) + m) & mask;
405 static int clk_rcg2_set_duty_cycle(struct clk_hw *hw, struct clk_duty *duty)
407 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
408 u32 notn_m, n, m, d, not2d, mask, duty_per;
411 /* Duty-cycle cannot be modified for non-MND RCGs */
415 mask = BIT(rcg->mnd_width) - 1;
417 regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), ¬n_m);
418 regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
420 n = (~(notn_m) + m) & mask;
422 duty_per = (duty->num * 100) / duty->den;
424 /* Calculate 2d value */
425 d = DIV_ROUND_CLOSEST(n * duty_per * 2, 100);
427 /* Check bit widths of 2d. If D is too big reduce duty cycle. */
431 if ((d / 2) > (n - m))
433 else if ((d / 2) < (m / 2))
438 ret = regmap_update_bits(rcg->clkr.regmap, RCG_D_OFFSET(rcg), mask,
443 return update_config(rcg);
446 const struct clk_ops clk_rcg2_ops = {
447 .is_enabled = clk_rcg2_is_enabled,
448 .get_parent = clk_rcg2_get_parent,
449 .set_parent = clk_rcg2_set_parent,
450 .recalc_rate = clk_rcg2_recalc_rate,
451 .determine_rate = clk_rcg2_determine_rate,
452 .set_rate = clk_rcg2_set_rate,
453 .set_rate_and_parent = clk_rcg2_set_rate_and_parent,
454 .get_duty_cycle = clk_rcg2_get_duty_cycle,
455 .set_duty_cycle = clk_rcg2_set_duty_cycle,
457 EXPORT_SYMBOL_GPL(clk_rcg2_ops);
459 const struct clk_ops clk_rcg2_floor_ops = {
460 .is_enabled = clk_rcg2_is_enabled,
461 .get_parent = clk_rcg2_get_parent,
462 .set_parent = clk_rcg2_set_parent,
463 .recalc_rate = clk_rcg2_recalc_rate,
464 .determine_rate = clk_rcg2_determine_floor_rate,
465 .set_rate = clk_rcg2_set_floor_rate,
466 .set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
467 .get_duty_cycle = clk_rcg2_get_duty_cycle,
468 .set_duty_cycle = clk_rcg2_set_duty_cycle,
470 EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);
477 static const struct frac_entry frac_table_675m[] = { /* link rate of 270M */
478 { 52, 295 }, /* 119 M */
479 { 11, 57 }, /* 130.25 M */
480 { 63, 307 }, /* 138.50 M */
481 { 11, 50 }, /* 148.50 M */
482 { 47, 206 }, /* 154 M */
483 { 31, 100 }, /* 205.25 M */
484 { 107, 269 }, /* 268.50 M */
488 static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
489 { 31, 211 }, /* 119 M */
490 { 32, 199 }, /* 130.25 M */
491 { 63, 307 }, /* 138.50 M */
492 { 11, 60 }, /* 148.50 M */
493 { 50, 263 }, /* 154 M */
494 { 31, 120 }, /* 205.25 M */
495 { 119, 359 }, /* 268.50 M */
499 static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
500 unsigned long parent_rate)
502 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
503 struct freq_tbl f = *rcg->freq_tbl;
504 const struct frac_entry *frac;
506 s64 src_rate = parent_rate;
508 u32 mask = BIT(rcg->hid_width) - 1;
511 if (src_rate == 810000000)
512 frac = frac_table_810m;
514 frac = frac_table_675m;
516 for (; frac->num; frac++) {
518 request *= frac->den;
519 request = div_s64(request, frac->num);
520 if ((src_rate < (request - delta)) ||
521 (src_rate > (request + delta)))
524 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
527 f.pre_div >>= CFG_SRC_DIV_SHIFT;
532 return clk_rcg2_configure(rcg, &f);
538 static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
539 unsigned long rate, unsigned long parent_rate, u8 index)
541 /* Parent index is set statically in frequency table */
542 return clk_edp_pixel_set_rate(hw, rate, parent_rate);
545 static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
546 struct clk_rate_request *req)
548 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
549 const struct freq_tbl *f = rcg->freq_tbl;
550 const struct frac_entry *frac;
553 u32 mask = BIT(rcg->hid_width) - 1;
555 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
557 /* Force the correct parent */
558 req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
559 req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);
561 if (req->best_parent_rate == 810000000)
562 frac = frac_table_810m;
564 frac = frac_table_675m;
566 for (; frac->num; frac++) {
568 request *= frac->den;
569 request = div_s64(request, frac->num);
570 if ((req->best_parent_rate < (request - delta)) ||
571 (req->best_parent_rate > (request + delta)))
574 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
576 hid_div >>= CFG_SRC_DIV_SHIFT;
579 req->rate = calc_rate(req->best_parent_rate,
580 frac->num, frac->den,
581 !!frac->den, hid_div);
588 const struct clk_ops clk_edp_pixel_ops = {
589 .is_enabled = clk_rcg2_is_enabled,
590 .get_parent = clk_rcg2_get_parent,
591 .set_parent = clk_rcg2_set_parent,
592 .recalc_rate = clk_rcg2_recalc_rate,
593 .set_rate = clk_edp_pixel_set_rate,
594 .set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
595 .determine_rate = clk_edp_pixel_determine_rate,
597 EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
599 static int clk_byte_determine_rate(struct clk_hw *hw,
600 struct clk_rate_request *req)
602 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
603 const struct freq_tbl *f = rcg->freq_tbl;
604 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
605 unsigned long parent_rate, div;
606 u32 mask = BIT(rcg->hid_width) - 1;
612 req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
613 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);
615 div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
616 div = min_t(u32, div, mask);
618 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
623 static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
624 unsigned long parent_rate)
626 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
627 struct freq_tbl f = *rcg->freq_tbl;
629 u32 mask = BIT(rcg->hid_width) - 1;
631 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
632 div = min_t(u32, div, mask);
636 return clk_rcg2_configure(rcg, &f);
639 static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
640 unsigned long rate, unsigned long parent_rate, u8 index)
642 /* Parent index is set statically in frequency table */
643 return clk_byte_set_rate(hw, rate, parent_rate);
646 const struct clk_ops clk_byte_ops = {
647 .is_enabled = clk_rcg2_is_enabled,
648 .get_parent = clk_rcg2_get_parent,
649 .set_parent = clk_rcg2_set_parent,
650 .recalc_rate = clk_rcg2_recalc_rate,
651 .set_rate = clk_byte_set_rate,
652 .set_rate_and_parent = clk_byte_set_rate_and_parent,
653 .determine_rate = clk_byte_determine_rate,
655 EXPORT_SYMBOL_GPL(clk_byte_ops);
657 static int clk_byte2_determine_rate(struct clk_hw *hw,
658 struct clk_rate_request *req)
660 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
661 unsigned long parent_rate, div;
662 u32 mask = BIT(rcg->hid_width) - 1;
664 unsigned long rate = req->rate;
669 p = req->best_parent_hw;
670 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);
672 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
673 div = min_t(u32, div, mask);
675 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
680 static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
681 unsigned long parent_rate)
683 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
684 struct freq_tbl f = { 0 };
686 int i, num_parents = clk_hw_get_num_parents(hw);
687 u32 mask = BIT(rcg->hid_width) - 1;
690 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
691 div = min_t(u32, div, mask);
695 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
696 cfg &= CFG_SRC_SEL_MASK;
697 cfg >>= CFG_SRC_SEL_SHIFT;
699 for (i = 0; i < num_parents; i++) {
700 if (cfg == rcg->parent_map[i].cfg) {
701 f.src = rcg->parent_map[i].src;
702 return clk_rcg2_configure(rcg, &f);
709 static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
710 unsigned long rate, unsigned long parent_rate, u8 index)
712 /* Read the hardware to determine parent during set_rate */
713 return clk_byte2_set_rate(hw, rate, parent_rate);
716 const struct clk_ops clk_byte2_ops = {
717 .is_enabled = clk_rcg2_is_enabled,
718 .get_parent = clk_rcg2_get_parent,
719 .set_parent = clk_rcg2_set_parent,
720 .recalc_rate = clk_rcg2_recalc_rate,
721 .set_rate = clk_byte2_set_rate,
722 .set_rate_and_parent = clk_byte2_set_rate_and_parent,
723 .determine_rate = clk_byte2_determine_rate,
725 EXPORT_SYMBOL_GPL(clk_byte2_ops);
727 static const struct frac_entry frac_table_pixel[] = {
736 static int clk_pixel_determine_rate(struct clk_hw *hw,
737 struct clk_rate_request *req)
739 unsigned long request, src_rate;
741 const struct frac_entry *frac = frac_table_pixel;
743 for (; frac->num; frac++) {
744 request = (req->rate * frac->den) / frac->num;
746 src_rate = clk_hw_round_rate(req->best_parent_hw, request);
747 if ((src_rate < (request - delta)) ||
748 (src_rate > (request + delta)))
751 req->best_parent_rate = src_rate;
752 req->rate = (src_rate * frac->num) / frac->den;
759 static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
760 unsigned long parent_rate)
762 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
763 struct freq_tbl f = { 0 };
764 const struct frac_entry *frac = frac_table_pixel;
765 unsigned long request;
767 u32 mask = BIT(rcg->hid_width) - 1;
769 int i, num_parents = clk_hw_get_num_parents(hw);
771 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
772 cfg &= CFG_SRC_SEL_MASK;
773 cfg >>= CFG_SRC_SEL_SHIFT;
775 for (i = 0; i < num_parents; i++)
776 if (cfg == rcg->parent_map[i].cfg) {
777 f.src = rcg->parent_map[i].src;
781 for (; frac->num; frac++) {
782 request = (rate * frac->den) / frac->num;
784 if ((parent_rate < (request - delta)) ||
785 (parent_rate > (request + delta)))
788 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
791 f.pre_div >>= CFG_SRC_DIV_SHIFT;
796 return clk_rcg2_configure(rcg, &f);
801 static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
802 unsigned long parent_rate, u8 index)
804 return clk_pixel_set_rate(hw, rate, parent_rate);
807 const struct clk_ops clk_pixel_ops = {
808 .is_enabled = clk_rcg2_is_enabled,
809 .get_parent = clk_rcg2_get_parent,
810 .set_parent = clk_rcg2_set_parent,
811 .recalc_rate = clk_rcg2_recalc_rate,
812 .set_rate = clk_pixel_set_rate,
813 .set_rate_and_parent = clk_pixel_set_rate_and_parent,
814 .determine_rate = clk_pixel_determine_rate,
816 EXPORT_SYMBOL_GPL(clk_pixel_ops);
818 static int clk_gfx3d_determine_rate(struct clk_hw *hw,
819 struct clk_rate_request *req)
821 struct clk_rate_request parent_req = { };
822 struct clk_rcg2_gfx3d *cgfx = to_clk_rcg2_gfx3d(hw);
823 struct clk_hw *xo, *p0, *p1, *p2;
824 unsigned long p0_rate;
825 u8 mux_div = cgfx->div;
832 * This function does ping-pong the RCG between PLLs: if we don't
833 * have at least one fixed PLL and two variable ones,
834 * then it's not going to work correctly.
836 if (WARN_ON(!p0 || !p1 || !p2))
839 xo = clk_hw_get_parent_by_index(hw, 0);
840 if (req->rate == clk_hw_get_rate(xo)) {
841 req->best_parent_hw = xo;
848 parent_req.rate = req->rate * mux_div;
850 /* This has to be a fixed rate PLL */
851 p0_rate = clk_hw_get_rate(p0);
853 if (parent_req.rate == p0_rate) {
854 req->rate = req->best_parent_rate = p0_rate;
855 req->best_parent_hw = p0;
859 if (req->best_parent_hw == p0) {
860 /* Are we going back to a previously used rate? */
861 if (clk_hw_get_rate(p2) == parent_req.rate)
862 req->best_parent_hw = p2;
864 req->best_parent_hw = p1;
865 } else if (req->best_parent_hw == p2) {
866 req->best_parent_hw = p1;
868 req->best_parent_hw = p2;
871 ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
875 req->rate = req->best_parent_rate = parent_req.rate;
876 req->rate /= mux_div;
881 static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
882 unsigned long parent_rate, u8 index)
884 struct clk_rcg2_gfx3d *cgfx = to_clk_rcg2_gfx3d(hw);
885 struct clk_rcg2 *rcg = &cgfx->rcg;
889 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
890 /* On some targets, the GFX3D RCG may need to divide PLL frequency */
892 cfg |= ((2 * cgfx->div) - 1) << CFG_SRC_DIV_SHIFT;
894 ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
898 return update_config(rcg);
901 static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
902 unsigned long parent_rate)
905 * We should never get here; clk_gfx3d_determine_rate() should always
906 * make us use a different parent than what we're currently using, so
907 * clk_gfx3d_set_rate_and_parent() should always be called.
912 const struct clk_ops clk_gfx3d_ops = {
913 .is_enabled = clk_rcg2_is_enabled,
914 .get_parent = clk_rcg2_get_parent,
915 .set_parent = clk_rcg2_set_parent,
916 .recalc_rate = clk_rcg2_recalc_rate,
917 .set_rate = clk_gfx3d_set_rate,
918 .set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
919 .determine_rate = clk_gfx3d_determine_rate,
921 EXPORT_SYMBOL_GPL(clk_gfx3d_ops);
923 static int clk_rcg2_set_force_enable(struct clk_hw *hw)
925 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
926 const char *name = clk_hw_get_name(hw);
929 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
930 CMD_ROOT_EN, CMD_ROOT_EN);
934 /* wait for RCG to turn ON */
935 for (count = 500; count > 0; count--) {
936 if (clk_rcg2_is_enabled(hw))
942 pr_err("%s: RCG did not turn on\n", name);
946 static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
948 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
950 return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
955 clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
957 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
960 ret = clk_rcg2_set_force_enable(hw);
964 ret = clk_rcg2_configure(rcg, f);
968 return clk_rcg2_clear_force_enable(hw);
971 static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
972 unsigned long parent_rate)
974 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
975 const struct freq_tbl *f;
977 f = qcom_find_freq(rcg->freq_tbl, rate);
982 * In case clock is disabled, update the CFG, M, N and D registers
983 * and don't hit the update bit of CMD register.
985 if (!__clk_is_enabled(hw->clk))
986 return __clk_rcg2_configure(rcg, f);
988 return clk_rcg2_shared_force_enable_clear(hw, f);
991 static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
992 unsigned long rate, unsigned long parent_rate, u8 index)
994 return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
997 static int clk_rcg2_shared_enable(struct clk_hw *hw)
999 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1003 * Set the update bit because required configuration has already
1004 * been written in clk_rcg2_shared_set_rate()
1006 ret = clk_rcg2_set_force_enable(hw);
1010 ret = update_config(rcg);
1014 return clk_rcg2_clear_force_enable(hw);
1017 static void clk_rcg2_shared_disable(struct clk_hw *hw)
1019 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1023 * Store current configuration as switching to safe source would clear
1024 * the SRC and DIV of CFG register
1026 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
1029 * Park the RCG at a safe configuration - sourced off of safe source.
1030 * Force enable and disable the RCG while configuring it to safeguard
1031 * against any update signal coming from the downstream clock.
1032 * The current parent is still prepared and enabled at this point, and
1033 * the safe source is always on while application processor subsystem
1034 * is online. Therefore, the RCG can safely switch its parent.
1036 clk_rcg2_set_force_enable(hw);
1038 regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
1039 rcg->safe_src_index << CFG_SRC_SEL_SHIFT);
1043 clk_rcg2_clear_force_enable(hw);
1045 /* Write back the stored configuration corresponding to current rate */
1046 regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
1049 const struct clk_ops clk_rcg2_shared_ops = {
1050 .enable = clk_rcg2_shared_enable,
1051 .disable = clk_rcg2_shared_disable,
1052 .get_parent = clk_rcg2_get_parent,
1053 .set_parent = clk_rcg2_set_parent,
1054 .recalc_rate = clk_rcg2_recalc_rate,
1055 .determine_rate = clk_rcg2_determine_rate,
1056 .set_rate = clk_rcg2_shared_set_rate,
1057 .set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
1059 EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
1061 /* Common APIs to be used for DFS based RCGR */
1062 static void clk_rcg2_dfs_populate_freq(struct clk_hw *hw, unsigned int l,
1065 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1067 unsigned long prate = 0;
1068 u32 val, mask, cfg, mode, src;
1071 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(l), &cfg);
1073 mask = BIT(rcg->hid_width) - 1;
1076 f->pre_div = cfg & mask;
1078 src = cfg & CFG_SRC_SEL_MASK;
1079 src >>= CFG_SRC_SEL_SHIFT;
1081 num_parents = clk_hw_get_num_parents(hw);
1082 for (i = 0; i < num_parents; i++) {
1083 if (src == rcg->parent_map[i].cfg) {
1084 f->src = rcg->parent_map[i].src;
1085 p = clk_hw_get_parent_by_index(&rcg->clkr.hw, i);
1086 prate = clk_hw_get_rate(p);
1090 mode = cfg & CFG_MODE_MASK;
1091 mode >>= CFG_MODE_SHIFT;
1093 mask = BIT(rcg->mnd_width) - 1;
1094 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_M_DFSR(l),
1099 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_N_DFSR(l),
1107 f->freq = calc_rate(prate, f->m, f->n, mode, f->pre_div);
1110 static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2 *rcg)
1112 struct freq_tbl *freq_tbl;
1115 /* Allocate space for 1 extra since table is NULL terminated */
1116 freq_tbl = kcalloc(MAX_PERF_LEVEL + 1, sizeof(*freq_tbl), GFP_KERNEL);
1119 rcg->freq_tbl = freq_tbl;
1121 for (i = 0; i < MAX_PERF_LEVEL; i++)
1122 clk_rcg2_dfs_populate_freq(&rcg->clkr.hw, i, freq_tbl + i);
1127 static int clk_rcg2_dfs_determine_rate(struct clk_hw *hw,
1128 struct clk_rate_request *req)
1130 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1133 if (!rcg->freq_tbl) {
1134 ret = clk_rcg2_dfs_populate_freq_table(rcg);
1136 pr_err("Failed to update DFS tables for %s\n",
1137 clk_hw_get_name(hw));
1142 return clk_rcg2_determine_rate(hw, req);
1145 static unsigned long
1146 clk_rcg2_dfs_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
1148 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1149 u32 level, mask, cfg, m = 0, n = 0, mode, pre_div;
1151 regmap_read(rcg->clkr.regmap,
1152 rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &level);
1153 level &= GENMASK(4, 1);
1157 return rcg->freq_tbl[level].freq;
1160 * Assume that parent_rate is actually the parent because
1161 * we can't do any better at figuring it out when the table
1162 * hasn't been populated yet. We only populate the table
1163 * in determine_rate because we can't guarantee the parents
1164 * will be registered with the framework until then.
1166 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(level),
1169 mask = BIT(rcg->hid_width) - 1;
1172 pre_div = cfg & mask;
1174 mode = cfg & CFG_MODE_MASK;
1175 mode >>= CFG_MODE_SHIFT;
1177 mask = BIT(rcg->mnd_width) - 1;
1178 regmap_read(rcg->clkr.regmap,
1179 rcg->cmd_rcgr + SE_PERF_M_DFSR(level), &m);
1182 regmap_read(rcg->clkr.regmap,
1183 rcg->cmd_rcgr + SE_PERF_N_DFSR(level), &n);
1189 return calc_rate(parent_rate, m, n, mode, pre_div);
1192 static const struct clk_ops clk_rcg2_dfs_ops = {
1193 .is_enabled = clk_rcg2_is_enabled,
1194 .get_parent = clk_rcg2_get_parent,
1195 .determine_rate = clk_rcg2_dfs_determine_rate,
1196 .recalc_rate = clk_rcg2_dfs_recalc_rate,
1199 static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data *data,
1200 struct regmap *regmap)
1202 struct clk_rcg2 *rcg = data->rcg;
1203 struct clk_init_data *init = data->init;
1207 ret = regmap_read(regmap, rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &val);
1211 if (!(val & SE_CMD_DFS_EN))
1215 * Rate changes with consumer writing a register in
1216 * their own I/O region
1218 init->flags |= CLK_GET_RATE_NOCACHE;
1219 init->ops = &clk_rcg2_dfs_ops;
1221 rcg->freq_tbl = NULL;
1226 int qcom_cc_register_rcg_dfs(struct regmap *regmap,
1227 const struct clk_rcg_dfs_data *rcgs, size_t len)
1231 for (i = 0; i < len; i++) {
1232 ret = clk_rcg2_enable_dfs(&rcgs[i], regmap);
1239 EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs);
1241 static int clk_rcg2_dp_set_rate(struct clk_hw *hw, unsigned long rate,
1242 unsigned long parent_rate)
1244 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1245 struct freq_tbl f = { 0 };
1246 u32 mask = BIT(rcg->hid_width) - 1;
1248 int i, num_parents = clk_hw_get_num_parents(hw);
1249 unsigned long num, den;
1251 rational_best_approximation(parent_rate, rate,
1252 GENMASK(rcg->mnd_width - 1, 0),
1253 GENMASK(rcg->mnd_width - 1, 0), &den, &num);
1258 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
1260 cfg &= CFG_SRC_SEL_MASK;
1261 cfg >>= CFG_SRC_SEL_SHIFT;
1263 for (i = 0; i < num_parents; i++) {
1264 if (cfg == rcg->parent_map[i].cfg) {
1265 f.src = rcg->parent_map[i].src;
1270 f.pre_div = hid_div;
1271 f.pre_div >>= CFG_SRC_DIV_SHIFT;
1282 return clk_rcg2_configure(rcg, &f);
1285 static int clk_rcg2_dp_set_rate_and_parent(struct clk_hw *hw,
1286 unsigned long rate, unsigned long parent_rate, u8 index)
1288 return clk_rcg2_dp_set_rate(hw, rate, parent_rate);
1291 static int clk_rcg2_dp_determine_rate(struct clk_hw *hw,
1292 struct clk_rate_request *req)
1294 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1295 unsigned long num, den;
1298 /* Parent rate is a fixed phy link rate */
1299 rational_best_approximation(req->best_parent_rate, req->rate,
1300 GENMASK(rcg->mnd_width - 1, 0),
1301 GENMASK(rcg->mnd_width - 1, 0), &den, &num);
1306 tmp = req->best_parent_rate * num;
1313 const struct clk_ops clk_dp_ops = {
1314 .is_enabled = clk_rcg2_is_enabled,
1315 .get_parent = clk_rcg2_get_parent,
1316 .set_parent = clk_rcg2_set_parent,
1317 .recalc_rate = clk_rcg2_recalc_rate,
1318 .set_rate = clk_rcg2_dp_set_rate,
1319 .set_rate_and_parent = clk_rcg2_dp_set_rate_and_parent,
1320 .determine_rate = clk_rcg2_dp_determine_rate,
1322 EXPORT_SYMBOL_GPL(clk_dp_ops);