1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
6 #include <linux/kernel.h>
7 #include <linux/bitops.h>
10 #include <linux/export.h>
11 #include <linux/clk-provider.h>
12 #include <linux/delay.h>
13 #include <linux/rational.h>
14 #include <linux/regmap.h>
15 #include <linux/math64.h>
16 #include <linux/minmax.h>
17 #include <linux/slab.h>
19 #include <asm/div64.h>
25 #define CMD_UPDATE BIT(0)
26 #define CMD_ROOT_EN BIT(1)
27 #define CMD_DIRTY_CFG BIT(4)
28 #define CMD_DIRTY_N BIT(5)
29 #define CMD_DIRTY_M BIT(6)
30 #define CMD_DIRTY_D BIT(7)
31 #define CMD_ROOT_OFF BIT(31)
34 #define CFG_SRC_DIV_SHIFT 0
35 #define CFG_SRC_SEL_SHIFT 8
36 #define CFG_SRC_SEL_MASK (0x7 << CFG_SRC_SEL_SHIFT)
37 #define CFG_MODE_SHIFT 12
38 #define CFG_MODE_MASK (0x3 << CFG_MODE_SHIFT)
39 #define CFG_MODE_DUAL_EDGE (0x2 << CFG_MODE_SHIFT)
40 #define CFG_HW_CLK_CTRL_MASK BIT(20)
46 #define RCG_CFG_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
47 #define RCG_M_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
48 #define RCG_N_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
49 #define RCG_D_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)
51 /* Dynamic Frequency Scaling */
52 #define MAX_PERF_LEVEL 8
53 #define SE_CMD_DFSR_OFFSET 0x14
54 #define SE_CMD_DFS_EN BIT(0)
55 #define SE_PERF_DFSR(level) (0x1c + 0x4 * (level))
56 #define SE_PERF_M_DFSR(level) (0x5c + 0x4 * (level))
57 #define SE_PERF_N_DFSR(level) (0x9c + 0x4 * (level))
64 static int clk_rcg2_is_enabled(struct clk_hw
*hw
)
66 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
70 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
, &cmd
);
74 return (cmd
& CMD_ROOT_OFF
) == 0;
77 static u8
__clk_rcg2_get_parent(struct clk_hw
*hw
, u32 cfg
)
79 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
80 int num_parents
= clk_hw_get_num_parents(hw
);
83 cfg
&= CFG_SRC_SEL_MASK
;
84 cfg
>>= CFG_SRC_SEL_SHIFT
;
86 for (i
= 0; i
< num_parents
; i
++)
87 if (cfg
== rcg
->parent_map
[i
].cfg
)
90 pr_debug("%s: Clock %s has invalid parent, using default.\n",
91 __func__
, clk_hw_get_name(hw
));
95 static u8
clk_rcg2_get_parent(struct clk_hw
*hw
)
97 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
101 ret
= regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
103 pr_debug("%s: Unable to read CFG register for %s\n",
104 __func__
, clk_hw_get_name(hw
));
108 return __clk_rcg2_get_parent(hw
, cfg
);
111 static int update_config(struct clk_rcg2
*rcg
)
115 struct clk_hw
*hw
= &rcg
->clkr
.hw
;
116 const char *name
= clk_hw_get_name(hw
);
118 ret
= regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
119 CMD_UPDATE
, CMD_UPDATE
);
123 /* Wait for update to take effect */
124 for (count
= 500; count
> 0; count
--) {
125 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
, &cmd
);
128 if (!(cmd
& CMD_UPDATE
))
133 WARN(1, "%s: rcg didn't update its configuration.", name
);
137 static int clk_rcg2_set_parent(struct clk_hw
*hw
, u8 index
)
139 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
141 u32 cfg
= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
143 ret
= regmap_update_bits(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
),
144 CFG_SRC_SEL_MASK
, cfg
);
148 return update_config(rcg
);
152 * Calculate m/n:d rate
155 * rate = ----------- x ---
159 calc_rate(unsigned long rate
, u32 m
, u32 n
, u32 mode
, u32 hid_div
)
162 rate
= mult_frac(rate
, 2, hid_div
+ 1);
165 rate
= mult_frac(rate
, m
, n
);
171 __clk_rcg2_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
, u32 cfg
)
173 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
174 u32 hid_div
, m
= 0, n
= 0, mode
= 0, mask
;
176 if (rcg
->mnd_width
) {
177 mask
= BIT(rcg
->mnd_width
) - 1;
178 regmap_read(rcg
->clkr
.regmap
, RCG_M_OFFSET(rcg
), &m
);
180 regmap_read(rcg
->clkr
.regmap
, RCG_N_OFFSET(rcg
), &n
);
184 mode
= cfg
& CFG_MODE_MASK
;
185 mode
>>= CFG_MODE_SHIFT
;
188 mask
= BIT(rcg
->hid_width
) - 1;
189 hid_div
= cfg
>> CFG_SRC_DIV_SHIFT
;
192 return calc_rate(parent_rate
, m
, n
, mode
, hid_div
);
196 clk_rcg2_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
198 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
201 regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
203 return __clk_rcg2_recalc_rate(hw
, parent_rate
, cfg
);
206 static int _freq_tbl_determine_rate(struct clk_hw
*hw
, const struct freq_tbl
*f
,
207 struct clk_rate_request
*req
,
208 enum freq_policy policy
)
210 unsigned long clk_flags
, rate
= req
->rate
;
212 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
217 f
= qcom_find_freq_floor(f
, rate
);
220 f
= qcom_find_freq(f
, rate
);
229 index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
233 clk_flags
= clk_hw_get_flags(hw
);
234 p
= clk_hw_get_parent_by_index(hw
, index
);
238 if (clk_flags
& CLK_SET_RATE_PARENT
) {
244 rate
*= f
->pre_div
+ 1;
254 rate
= clk_hw_get_rate(p
);
256 req
->best_parent_hw
= p
;
257 req
->best_parent_rate
= rate
;
263 static const struct freq_conf
*
264 __clk_rcg2_select_conf(struct clk_hw
*hw
, const struct freq_multi_tbl
*f
,
265 unsigned long req_rate
)
267 unsigned long rate_diff
, best_rate_diff
= ULONG_MAX
;
268 const struct freq_conf
*conf
, *best_conf
= NULL
;
269 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
270 const char *name
= clk_hw_get_name(hw
);
271 unsigned long parent_rate
, rate
;
275 /* Exit early if only one config is defined */
276 if (f
->num_confs
== 1) {
277 best_conf
= f
->confs
;
281 /* Search in each provided config the one that is near the wanted rate */
282 for (i
= 0, conf
= f
->confs
; i
< f
->num_confs
; i
++, conf
++) {
283 index
= qcom_find_src_index(hw
, rcg
->parent_map
, conf
->src
);
287 p
= clk_hw_get_parent_by_index(hw
, index
);
291 parent_rate
= clk_hw_get_rate(p
);
292 rate
= calc_rate(parent_rate
, conf
->n
, conf
->m
, conf
->n
, conf
->pre_div
);
294 if (rate
== req_rate
) {
299 rate_diff
= abs_diff(req_rate
, rate
);
300 if (rate_diff
< best_rate_diff
) {
301 best_rate_diff
= rate_diff
;
307 * Very unlikely. Warn if we couldn't find a correct config
308 * due to parent not found in every config.
310 if (unlikely(!best_conf
)) {
311 WARN(1, "%s: can't find a configuration for rate %lu\n",
313 return ERR_PTR(-EINVAL
);
320 static int _freq_tbl_fm_determine_rate(struct clk_hw
*hw
, const struct freq_multi_tbl
*f
,
321 struct clk_rate_request
*req
)
323 unsigned long clk_flags
, rate
= req
->rate
;
324 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
325 const struct freq_conf
*conf
;
329 f
= qcom_find_freq_multi(f
, rate
);
333 conf
= __clk_rcg2_select_conf(hw
, f
, rate
);
335 return PTR_ERR(conf
);
336 index
= qcom_find_src_index(hw
, rcg
->parent_map
, conf
->src
);
340 clk_flags
= clk_hw_get_flags(hw
);
341 p
= clk_hw_get_parent_by_index(hw
, index
);
345 if (clk_flags
& CLK_SET_RATE_PARENT
) {
351 rate
*= conf
->pre_div
+ 1;
358 do_div(tmp
, conf
->m
);
362 rate
= clk_hw_get_rate(p
);
365 req
->best_parent_hw
= p
;
366 req
->best_parent_rate
= rate
;
372 static int clk_rcg2_determine_rate(struct clk_hw
*hw
,
373 struct clk_rate_request
*req
)
375 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
377 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
, CEIL
);
380 static int clk_rcg2_determine_floor_rate(struct clk_hw
*hw
,
381 struct clk_rate_request
*req
)
383 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
385 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
, FLOOR
);
388 static int clk_rcg2_fm_determine_rate(struct clk_hw
*hw
,
389 struct clk_rate_request
*req
)
391 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
393 return _freq_tbl_fm_determine_rate(hw
, rcg
->freq_multi_tbl
, req
);
396 static int __clk_rcg2_configure(struct clk_rcg2
*rcg
, const struct freq_tbl
*f
,
399 u32 cfg
, mask
, d_val
, not2d_val
, n_minus_m
;
400 struct clk_hw
*hw
= &rcg
->clkr
.hw
;
401 int ret
, index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
406 if (rcg
->mnd_width
&& f
->n
) {
407 mask
= BIT(rcg
->mnd_width
) - 1;
408 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
409 RCG_M_OFFSET(rcg
), mask
, f
->m
);
413 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
414 RCG_N_OFFSET(rcg
), mask
, ~(f
->n
- f
->m
));
418 /* Calculate 2d value */
421 n_minus_m
= f
->n
- f
->m
;
424 d_val
= clamp_t(u32
, d_val
, f
->m
, n_minus_m
);
425 not2d_val
= ~d_val
& mask
;
427 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
428 RCG_D_OFFSET(rcg
), mask
, not2d_val
);
433 mask
= BIT(rcg
->hid_width
) - 1;
434 mask
|= CFG_SRC_SEL_MASK
| CFG_MODE_MASK
| CFG_HW_CLK_CTRL_MASK
;
435 cfg
= f
->pre_div
<< CFG_SRC_DIV_SHIFT
;
436 cfg
|= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
437 if (rcg
->mnd_width
&& f
->n
&& (f
->m
!= f
->n
))
438 cfg
|= CFG_MODE_DUAL_EDGE
;
439 if (rcg
->hw_clk_ctrl
)
440 cfg
|= CFG_HW_CLK_CTRL_MASK
;
448 static int clk_rcg2_configure(struct clk_rcg2
*rcg
, const struct freq_tbl
*f
)
453 ret
= regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
457 ret
= __clk_rcg2_configure(rcg
, f
, &cfg
);
461 ret
= regmap_write(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), cfg
);
465 return update_config(rcg
);
468 static int __clk_rcg2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
469 enum freq_policy policy
)
471 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
472 const struct freq_tbl
*f
;
476 f
= qcom_find_freq_floor(rcg
->freq_tbl
, rate
);
479 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
488 return clk_rcg2_configure(rcg
, f
);
491 static int __clk_rcg2_fm_set_rate(struct clk_hw
*hw
, unsigned long rate
)
493 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
494 const struct freq_multi_tbl
*f
;
495 const struct freq_conf
*conf
;
496 struct freq_tbl f_tbl
= {};
498 f
= qcom_find_freq_multi(rcg
->freq_multi_tbl
, rate
);
502 conf
= __clk_rcg2_select_conf(hw
, f
, rate
);
504 return PTR_ERR(conf
);
506 f_tbl
.freq
= f
->freq
;
507 f_tbl
.src
= conf
->src
;
508 f_tbl
.pre_div
= conf
->pre_div
;
512 return clk_rcg2_configure(rcg
, &f_tbl
);
515 static int clk_rcg2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
516 unsigned long parent_rate
)
518 return __clk_rcg2_set_rate(hw
, rate
, CEIL
);
521 static int clk_rcg2_set_floor_rate(struct clk_hw
*hw
, unsigned long rate
,
522 unsigned long parent_rate
)
524 return __clk_rcg2_set_rate(hw
, rate
, FLOOR
);
/* set_rate callback for multi-configuration RCGs. */
static int clk_rcg2_fm_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	return __clk_rcg2_fm_set_rate(hw, rate);
}
533 static int clk_rcg2_set_rate_and_parent(struct clk_hw
*hw
,
534 unsigned long rate
, unsigned long parent_rate
, u8 index
)
536 return __clk_rcg2_set_rate(hw
, rate
, CEIL
);
539 static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw
*hw
,
540 unsigned long rate
, unsigned long parent_rate
, u8 index
)
542 return __clk_rcg2_set_rate(hw
, rate
, FLOOR
);
545 static int clk_rcg2_fm_set_rate_and_parent(struct clk_hw
*hw
,
546 unsigned long rate
, unsigned long parent_rate
, u8 index
)
548 return __clk_rcg2_fm_set_rate(hw
, rate
);
551 static int clk_rcg2_get_duty_cycle(struct clk_hw
*hw
, struct clk_duty
*duty
)
553 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
554 u32 notn_m
, n
, m
, d
, not2d
, mask
;
556 if (!rcg
->mnd_width
) {
557 /* 50 % duty-cycle for Non-MND RCGs */
563 regmap_read(rcg
->clkr
.regmap
, RCG_D_OFFSET(rcg
), ¬2d
);
564 regmap_read(rcg
->clkr
.regmap
, RCG_M_OFFSET(rcg
), &m
);
565 regmap_read(rcg
->clkr
.regmap
, RCG_N_OFFSET(rcg
), ¬n_m
);
567 if (!not2d
&& !m
&& !notn_m
) {
568 /* 50 % duty-cycle always */
574 mask
= BIT(rcg
->mnd_width
) - 1;
577 d
= DIV_ROUND_CLOSEST(d
, 2);
579 n
= (~(notn_m
) + m
) & mask
;
587 static int clk_rcg2_set_duty_cycle(struct clk_hw
*hw
, struct clk_duty
*duty
)
589 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
590 u32 notn_m
, n
, m
, d
, not2d
, mask
, duty_per
, cfg
;
593 /* Duty-cycle cannot be modified for non-MND RCGs */
597 mask
= BIT(rcg
->mnd_width
) - 1;
599 regmap_read(rcg
->clkr
.regmap
, RCG_N_OFFSET(rcg
), ¬n_m
);
600 regmap_read(rcg
->clkr
.regmap
, RCG_M_OFFSET(rcg
), &m
);
601 regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
603 /* Duty-cycle cannot be modified if MND divider is in bypass mode. */
604 if (!(cfg
& CFG_MODE_MASK
))
607 n
= (~(notn_m
) + m
) & mask
;
609 duty_per
= (duty
->num
* 100) / duty
->den
;
611 /* Calculate 2d value */
612 d
= DIV_ROUND_CLOSEST(n
* duty_per
* 2, 100);
615 * Check bit widths of 2d. If D is too big reduce duty cycle.
616 * Also make sure it is never zero.
618 d
= clamp_val(d
, 1, mask
);
620 if ((d
/ 2) > (n
- m
))
622 else if ((d
/ 2) < (m
/ 2))
627 ret
= regmap_update_bits(rcg
->clkr
.regmap
, RCG_D_OFFSET(rcg
), mask
,
632 return update_config(rcg
);
635 const struct clk_ops clk_rcg2_ops
= {
636 .is_enabled
= clk_rcg2_is_enabled
,
637 .get_parent
= clk_rcg2_get_parent
,
638 .set_parent
= clk_rcg2_set_parent
,
639 .recalc_rate
= clk_rcg2_recalc_rate
,
640 .determine_rate
= clk_rcg2_determine_rate
,
641 .set_rate
= clk_rcg2_set_rate
,
642 .set_rate_and_parent
= clk_rcg2_set_rate_and_parent
,
643 .get_duty_cycle
= clk_rcg2_get_duty_cycle
,
644 .set_duty_cycle
= clk_rcg2_set_duty_cycle
,
646 EXPORT_SYMBOL_GPL(clk_rcg2_ops
);
648 const struct clk_ops clk_rcg2_floor_ops
= {
649 .is_enabled
= clk_rcg2_is_enabled
,
650 .get_parent
= clk_rcg2_get_parent
,
651 .set_parent
= clk_rcg2_set_parent
,
652 .recalc_rate
= clk_rcg2_recalc_rate
,
653 .determine_rate
= clk_rcg2_determine_floor_rate
,
654 .set_rate
= clk_rcg2_set_floor_rate
,
655 .set_rate_and_parent
= clk_rcg2_set_floor_rate_and_parent
,
656 .get_duty_cycle
= clk_rcg2_get_duty_cycle
,
657 .set_duty_cycle
= clk_rcg2_set_duty_cycle
,
659 EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops
);
661 const struct clk_ops clk_rcg2_fm_ops
= {
662 .is_enabled
= clk_rcg2_is_enabled
,
663 .get_parent
= clk_rcg2_get_parent
,
664 .set_parent
= clk_rcg2_set_parent
,
665 .recalc_rate
= clk_rcg2_recalc_rate
,
666 .determine_rate
= clk_rcg2_fm_determine_rate
,
667 .set_rate
= clk_rcg2_fm_set_rate
,
668 .set_rate_and_parent
= clk_rcg2_fm_set_rate_and_parent
,
669 .get_duty_cycle
= clk_rcg2_get_duty_cycle
,
670 .set_duty_cycle
= clk_rcg2_set_duty_cycle
,
672 EXPORT_SYMBOL_GPL(clk_rcg2_fm_ops
);
674 const struct clk_ops clk_rcg2_mux_closest_ops
= {
675 .determine_rate
= __clk_mux_determine_rate_closest
,
676 .get_parent
= clk_rcg2_get_parent
,
677 .set_parent
= clk_rcg2_set_parent
,
679 EXPORT_SYMBOL_GPL(clk_rcg2_mux_closest_ops
);
686 static const struct frac_entry frac_table_675m
[] = { /* link rate of 270M */
687 { 52, 295 }, /* 119 M */
688 { 11, 57 }, /* 130.25 M */
689 { 63, 307 }, /* 138.50 M */
690 { 11, 50 }, /* 148.50 M */
691 { 47, 206 }, /* 154 M */
692 { 31, 100 }, /* 205.25 M */
693 { 107, 269 }, /* 268.50 M */
697 static struct frac_entry frac_table_810m
[] = { /* Link rate of 162M */
698 { 31, 211 }, /* 119 M */
699 { 32, 199 }, /* 130.25 M */
700 { 63, 307 }, /* 138.50 M */
701 { 11, 60 }, /* 148.50 M */
702 { 50, 263 }, /* 154 M */
703 { 31, 120 }, /* 205.25 M */
704 { 119, 359 }, /* 268.50 M */
708 static int clk_edp_pixel_set_rate(struct clk_hw
*hw
, unsigned long rate
,
709 unsigned long parent_rate
)
711 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
712 struct freq_tbl f
= *rcg
->freq_tbl
;
713 const struct frac_entry
*frac
;
715 s64 src_rate
= parent_rate
;
717 u32 mask
= BIT(rcg
->hid_width
) - 1;
720 if (src_rate
== 810000000)
721 frac
= frac_table_810m
;
723 frac
= frac_table_675m
;
725 for (; frac
->num
; frac
++) {
727 request
*= frac
->den
;
728 request
= div_s64(request
, frac
->num
);
729 if ((src_rate
< (request
- delta
)) ||
730 (src_rate
> (request
+ delta
)))
733 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
736 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
741 return clk_rcg2_configure(rcg
, &f
);
747 static int clk_edp_pixel_set_rate_and_parent(struct clk_hw
*hw
,
748 unsigned long rate
, unsigned long parent_rate
, u8 index
)
750 /* Parent index is set statically in frequency table */
751 return clk_edp_pixel_set_rate(hw
, rate
, parent_rate
);
754 static int clk_edp_pixel_determine_rate(struct clk_hw
*hw
,
755 struct clk_rate_request
*req
)
757 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
758 const struct freq_tbl
*f
= rcg
->freq_tbl
;
759 const struct frac_entry
*frac
;
762 u32 mask
= BIT(rcg
->hid_width
) - 1;
764 int index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
766 /* Force the correct parent */
767 req
->best_parent_hw
= clk_hw_get_parent_by_index(hw
, index
);
768 req
->best_parent_rate
= clk_hw_get_rate(req
->best_parent_hw
);
770 if (req
->best_parent_rate
== 810000000)
771 frac
= frac_table_810m
;
773 frac
= frac_table_675m
;
775 for (; frac
->num
; frac
++) {
777 request
*= frac
->den
;
778 request
= div_s64(request
, frac
->num
);
779 if ((req
->best_parent_rate
< (request
- delta
)) ||
780 (req
->best_parent_rate
> (request
+ delta
)))
783 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
785 hid_div
>>= CFG_SRC_DIV_SHIFT
;
788 req
->rate
= calc_rate(req
->best_parent_rate
,
789 frac
->num
, frac
->den
,
790 !!frac
->den
, hid_div
);
797 const struct clk_ops clk_edp_pixel_ops
= {
798 .is_enabled
= clk_rcg2_is_enabled
,
799 .get_parent
= clk_rcg2_get_parent
,
800 .set_parent
= clk_rcg2_set_parent
,
801 .recalc_rate
= clk_rcg2_recalc_rate
,
802 .set_rate
= clk_edp_pixel_set_rate
,
803 .set_rate_and_parent
= clk_edp_pixel_set_rate_and_parent
,
804 .determine_rate
= clk_edp_pixel_determine_rate
,
806 EXPORT_SYMBOL_GPL(clk_edp_pixel_ops
);
808 static int clk_byte_determine_rate(struct clk_hw
*hw
,
809 struct clk_rate_request
*req
)
811 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
812 const struct freq_tbl
*f
= rcg
->freq_tbl
;
813 int index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
814 unsigned long parent_rate
, div
;
815 u32 mask
= BIT(rcg
->hid_width
) - 1;
821 req
->best_parent_hw
= p
= clk_hw_get_parent_by_index(hw
, index
);
822 req
->best_parent_rate
= parent_rate
= clk_hw_round_rate(p
, req
->rate
);
824 div
= DIV_ROUND_UP((2 * parent_rate
), req
->rate
) - 1;
825 div
= min_t(u32
, div
, mask
);
827 req
->rate
= calc_rate(parent_rate
, 0, 0, 0, div
);
832 static int clk_byte_set_rate(struct clk_hw
*hw
, unsigned long rate
,
833 unsigned long parent_rate
)
835 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
836 struct freq_tbl f
= *rcg
->freq_tbl
;
838 u32 mask
= BIT(rcg
->hid_width
) - 1;
840 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
841 div
= min_t(u32
, div
, mask
);
845 return clk_rcg2_configure(rcg
, &f
);
848 static int clk_byte_set_rate_and_parent(struct clk_hw
*hw
,
849 unsigned long rate
, unsigned long parent_rate
, u8 index
)
851 /* Parent index is set statically in frequency table */
852 return clk_byte_set_rate(hw
, rate
, parent_rate
);
855 const struct clk_ops clk_byte_ops
= {
856 .is_enabled
= clk_rcg2_is_enabled
,
857 .get_parent
= clk_rcg2_get_parent
,
858 .set_parent
= clk_rcg2_set_parent
,
859 .recalc_rate
= clk_rcg2_recalc_rate
,
860 .set_rate
= clk_byte_set_rate
,
861 .set_rate_and_parent
= clk_byte_set_rate_and_parent
,
862 .determine_rate
= clk_byte_determine_rate
,
864 EXPORT_SYMBOL_GPL(clk_byte_ops
);
866 static int clk_byte2_determine_rate(struct clk_hw
*hw
,
867 struct clk_rate_request
*req
)
869 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
870 unsigned long parent_rate
, div
;
871 u32 mask
= BIT(rcg
->hid_width
) - 1;
873 unsigned long rate
= req
->rate
;
878 p
= req
->best_parent_hw
;
879 req
->best_parent_rate
= parent_rate
= clk_hw_round_rate(p
, rate
);
881 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
882 div
= min_t(u32
, div
, mask
);
884 req
->rate
= calc_rate(parent_rate
, 0, 0, 0, div
);
889 static int clk_byte2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
890 unsigned long parent_rate
)
892 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
893 struct freq_tbl f
= { 0 };
895 int i
, num_parents
= clk_hw_get_num_parents(hw
);
896 u32 mask
= BIT(rcg
->hid_width
) - 1;
899 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
900 div
= min_t(u32
, div
, mask
);
904 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
905 cfg
&= CFG_SRC_SEL_MASK
;
906 cfg
>>= CFG_SRC_SEL_SHIFT
;
908 for (i
= 0; i
< num_parents
; i
++) {
909 if (cfg
== rcg
->parent_map
[i
].cfg
) {
910 f
.src
= rcg
->parent_map
[i
].src
;
911 return clk_rcg2_configure(rcg
, &f
);
918 static int clk_byte2_set_rate_and_parent(struct clk_hw
*hw
,
919 unsigned long rate
, unsigned long parent_rate
, u8 index
)
921 /* Read the hardware to determine parent during set_rate */
922 return clk_byte2_set_rate(hw
, rate
, parent_rate
);
925 const struct clk_ops clk_byte2_ops
= {
926 .is_enabled
= clk_rcg2_is_enabled
,
927 .get_parent
= clk_rcg2_get_parent
,
928 .set_parent
= clk_rcg2_set_parent
,
929 .recalc_rate
= clk_rcg2_recalc_rate
,
930 .set_rate
= clk_byte2_set_rate
,
931 .set_rate_and_parent
= clk_byte2_set_rate_and_parent
,
932 .determine_rate
= clk_byte2_determine_rate
,
934 EXPORT_SYMBOL_GPL(clk_byte2_ops
);
936 static const struct frac_entry frac_table_pixel
[] = {
945 static int clk_pixel_determine_rate(struct clk_hw
*hw
,
946 struct clk_rate_request
*req
)
948 unsigned long request
, src_rate
;
950 const struct frac_entry
*frac
= frac_table_pixel
;
952 for (; frac
->num
; frac
++) {
953 request
= (req
->rate
* frac
->den
) / frac
->num
;
955 src_rate
= clk_hw_round_rate(req
->best_parent_hw
, request
);
956 if ((src_rate
< (request
- delta
)) ||
957 (src_rate
> (request
+ delta
)))
960 req
->best_parent_rate
= src_rate
;
961 req
->rate
= (src_rate
* frac
->num
) / frac
->den
;
968 static int clk_pixel_set_rate(struct clk_hw
*hw
, unsigned long rate
,
969 unsigned long parent_rate
)
971 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
972 struct freq_tbl f
= { 0 };
973 const struct frac_entry
*frac
= frac_table_pixel
;
974 unsigned long request
;
976 u32 mask
= BIT(rcg
->hid_width
) - 1;
978 int i
, num_parents
= clk_hw_get_num_parents(hw
);
980 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
981 cfg
&= CFG_SRC_SEL_MASK
;
982 cfg
>>= CFG_SRC_SEL_SHIFT
;
984 for (i
= 0; i
< num_parents
; i
++)
985 if (cfg
== rcg
->parent_map
[i
].cfg
) {
986 f
.src
= rcg
->parent_map
[i
].src
;
990 for (; frac
->num
; frac
++) {
991 request
= (rate
* frac
->den
) / frac
->num
;
993 if ((parent_rate
< (request
- delta
)) ||
994 (parent_rate
> (request
+ delta
)))
997 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
1000 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
1005 return clk_rcg2_configure(rcg
, &f
);
1010 static int clk_pixel_set_rate_and_parent(struct clk_hw
*hw
, unsigned long rate
,
1011 unsigned long parent_rate
, u8 index
)
1013 return clk_pixel_set_rate(hw
, rate
, parent_rate
);
1016 const struct clk_ops clk_pixel_ops
= {
1017 .is_enabled
= clk_rcg2_is_enabled
,
1018 .get_parent
= clk_rcg2_get_parent
,
1019 .set_parent
= clk_rcg2_set_parent
,
1020 .recalc_rate
= clk_rcg2_recalc_rate
,
1021 .set_rate
= clk_pixel_set_rate
,
1022 .set_rate_and_parent
= clk_pixel_set_rate_and_parent
,
1023 .determine_rate
= clk_pixel_determine_rate
,
1025 EXPORT_SYMBOL_GPL(clk_pixel_ops
);
1027 static int clk_gfx3d_determine_rate(struct clk_hw
*hw
,
1028 struct clk_rate_request
*req
)
1030 struct clk_rate_request parent_req
= { .min_rate
= 0, .max_rate
= ULONG_MAX
};
1031 struct clk_rcg2_gfx3d
*cgfx
= to_clk_rcg2_gfx3d(hw
);
1032 struct clk_hw
*xo
, *p0
, *p1
, *p2
;
1033 unsigned long p0_rate
;
1034 u8 mux_div
= cgfx
->div
;
1041 * This function does ping-pong the RCG between PLLs: if we don't
1042 * have at least one fixed PLL and two variable ones,
1043 * then it's not going to work correctly.
1045 if (WARN_ON(!p0
|| !p1
|| !p2
))
1048 xo
= clk_hw_get_parent_by_index(hw
, 0);
1049 if (req
->rate
== clk_hw_get_rate(xo
)) {
1050 req
->best_parent_hw
= xo
;
1057 parent_req
.rate
= req
->rate
* mux_div
;
1059 /* This has to be a fixed rate PLL */
1060 p0_rate
= clk_hw_get_rate(p0
);
1062 if (parent_req
.rate
== p0_rate
) {
1063 req
->rate
= req
->best_parent_rate
= p0_rate
;
1064 req
->best_parent_hw
= p0
;
1068 if (req
->best_parent_hw
== p0
) {
1069 /* Are we going back to a previously used rate? */
1070 if (clk_hw_get_rate(p2
) == parent_req
.rate
)
1071 req
->best_parent_hw
= p2
;
1073 req
->best_parent_hw
= p1
;
1074 } else if (req
->best_parent_hw
== p2
) {
1075 req
->best_parent_hw
= p1
;
1077 req
->best_parent_hw
= p2
;
1080 clk_hw_get_rate_range(req
->best_parent_hw
,
1081 &parent_req
.min_rate
, &parent_req
.max_rate
);
1083 if (req
->min_rate
> parent_req
.min_rate
)
1084 parent_req
.min_rate
= req
->min_rate
;
1086 if (req
->max_rate
< parent_req
.max_rate
)
1087 parent_req
.max_rate
= req
->max_rate
;
1089 ret
= __clk_determine_rate(req
->best_parent_hw
, &parent_req
);
1093 req
->rate
= req
->best_parent_rate
= parent_req
.rate
;
1094 req
->rate
/= mux_div
;
1099 static int clk_gfx3d_set_rate_and_parent(struct clk_hw
*hw
, unsigned long rate
,
1100 unsigned long parent_rate
, u8 index
)
1102 struct clk_rcg2_gfx3d
*cgfx
= to_clk_rcg2_gfx3d(hw
);
1103 struct clk_rcg2
*rcg
= &cgfx
->rcg
;
1107 cfg
= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
1108 /* On some targets, the GFX3D RCG may need to divide PLL frequency */
1110 cfg
|= ((2 * cgfx
->div
) - 1) << CFG_SRC_DIV_SHIFT
;
1112 ret
= regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, cfg
);
1116 return update_config(rcg
);
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
1130 const struct clk_ops clk_gfx3d_ops
= {
1131 .is_enabled
= clk_rcg2_is_enabled
,
1132 .get_parent
= clk_rcg2_get_parent
,
1133 .set_parent
= clk_rcg2_set_parent
,
1134 .recalc_rate
= clk_rcg2_recalc_rate
,
1135 .set_rate
= clk_gfx3d_set_rate
,
1136 .set_rate_and_parent
= clk_gfx3d_set_rate_and_parent
,
1137 .determine_rate
= clk_gfx3d_determine_rate
,
1139 EXPORT_SYMBOL_GPL(clk_gfx3d_ops
);
1141 static int clk_rcg2_set_force_enable(struct clk_hw
*hw
)
1143 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1144 const char *name
= clk_hw_get_name(hw
);
1147 ret
= regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
1148 CMD_ROOT_EN
, CMD_ROOT_EN
);
1152 /* wait for RCG to turn ON */
1153 for (count
= 500; count
> 0; count
--) {
1154 if (clk_rcg2_is_enabled(hw
))
1160 pr_err("%s: RCG did not turn on\n", name
);
1164 static int clk_rcg2_clear_force_enable(struct clk_hw
*hw
)
1166 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1168 return regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
/* Reconfigure a shared RCG while it is force-enabled, then release it. */
static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}
1189 static int __clk_rcg2_shared_set_rate(struct clk_hw
*hw
, unsigned long rate
,
1190 unsigned long parent_rate
,
1191 enum freq_policy policy
)
1193 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1194 const struct freq_tbl
*f
;
1198 f
= qcom_find_freq_floor(rcg
->freq_tbl
, rate
);
1201 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
1208 * In case clock is disabled, update the M, N and D registers, cache
1209 * the CFG value in parked_cfg and don't hit the update bit of CMD
1212 if (!clk_hw_is_enabled(hw
))
1213 return __clk_rcg2_configure(rcg
, f
, &rcg
->parked_cfg
);
1215 return clk_rcg2_shared_force_enable_clear(hw
, f
);
1218 static int clk_rcg2_shared_set_rate(struct clk_hw
*hw
, unsigned long rate
,
1219 unsigned long parent_rate
)
1221 return __clk_rcg2_shared_set_rate(hw
, rate
, parent_rate
, CEIL
);
1224 static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw
*hw
,
1225 unsigned long rate
, unsigned long parent_rate
, u8 index
)
1227 return __clk_rcg2_shared_set_rate(hw
, rate
, parent_rate
, CEIL
);
1230 static int clk_rcg2_shared_set_floor_rate(struct clk_hw
*hw
, unsigned long rate
,
1231 unsigned long parent_rate
)
1233 return __clk_rcg2_shared_set_rate(hw
, rate
, parent_rate
, FLOOR
);
1236 static int clk_rcg2_shared_set_floor_rate_and_parent(struct clk_hw
*hw
,
1237 unsigned long rate
, unsigned long parent_rate
, u8 index
)
1239 return __clk_rcg2_shared_set_rate(hw
, rate
, parent_rate
, FLOOR
);
1242 static int clk_rcg2_shared_enable(struct clk_hw
*hw
)
1244 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1248 * Set the update bit because required configuration has already
1249 * been written in clk_rcg2_shared_set_rate()
1251 ret
= clk_rcg2_set_force_enable(hw
);
1255 /* Write back the stored configuration corresponding to current rate */
1256 ret
= regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, rcg
->parked_cfg
);
1260 ret
= update_config(rcg
);
1264 return clk_rcg2_clear_force_enable(hw
);
1267 static void clk_rcg2_shared_disable(struct clk_hw
*hw
)
1269 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1272 * Store current configuration as switching to safe source would clear
1273 * the SRC and DIV of CFG register
1275 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &rcg
->parked_cfg
);
1278 * Park the RCG at a safe configuration - sourced off of safe source.
1279 * Force enable and disable the RCG while configuring it to safeguard
1280 * against any update signal coming from the downstream clock.
1281 * The current parent is still prepared and enabled at this point, and
1282 * the safe source is always on while application processor subsystem
1283 * is online. Therefore, the RCG can safely switch its parent.
1285 clk_rcg2_set_force_enable(hw
);
1287 regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
1288 rcg
->safe_src_index
<< CFG_SRC_SEL_SHIFT
);
1292 clk_rcg2_clear_force_enable(hw
);
1295 static u8
clk_rcg2_shared_get_parent(struct clk_hw
*hw
)
1297 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1299 /* If the shared rcg is parked use the cached cfg instead */
1300 if (!clk_hw_is_enabled(hw
))
1301 return __clk_rcg2_get_parent(hw
, rcg
->parked_cfg
);
1303 return clk_rcg2_get_parent(hw
);
1306 static int clk_rcg2_shared_set_parent(struct clk_hw
*hw
, u8 index
)
1308 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1310 /* If the shared rcg is parked only update the cached cfg */
1311 if (!clk_hw_is_enabled(hw
)) {
1312 rcg
->parked_cfg
&= ~CFG_SRC_SEL_MASK
;
1313 rcg
->parked_cfg
|= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
1318 return clk_rcg2_set_parent(hw
, index
);
1321 static unsigned long
1322 clk_rcg2_shared_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
1324 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1326 /* If the shared rcg is parked use the cached cfg instead */
1327 if (!clk_hw_is_enabled(hw
))
1328 return __clk_rcg2_recalc_rate(hw
, parent_rate
, rcg
->parked_cfg
);
1330 return clk_rcg2_recalc_rate(hw
, parent_rate
);
static int clk_rcg2_shared_init(struct clk_hw *hw)
{
	/*
	 * This does a few things:
	 *
	 *  1. Sets rcg->parked_cfg to reflect the value at probe so that the
	 *     proper parent is reported from clk_rcg2_shared_get_parent().
	 *
	 *  2. Clears the force enable bit of the RCG because we rely on child
	 *     clks (branches) to turn the RCG on/off with a hardware feedback
	 *     mechanism and only set the force enable bit in the RCG when we
	 *     want to make sure the clk stays on for parent switches or
	 *     parking.
	 *
	 *  3. Parks shared RCGs on the safe source at registration because we
	 *     can't be certain that the parent clk will stay on during boot,
	 *     especially if the parent is shared. If this RCG is enabled at
	 *     boot, and the parent is turned off, the RCG will get stuck on. A
	 *     GDSC can wedge if is turned on and the RCG is stuck on because
	 *     the GDSC's controller will hang waiting for the clk status to
	 *     toggle on when it never does.
	 *
	 * The safest option here is to "park" the RCG at init so that the clk
	 * can never get stuck on or off. This ensures the GDSC can't get
	 * wedged.
	 */
	clk_rcg2_shared_disable(hw);

	return 0;
}
/*
 * Ops for "shared" RCGs: on disable the RCG is parked on a safe source
 * (see clk_rcg2_shared_init()'s comment) so it can never get stuck when a
 * parent is turned off, and parent/rate queries fall back to the cached
 * parked_cfg while parked.
 */
const struct clk_ops clk_rcg2_shared_ops = {
	.init = clk_rcg2_shared_init,
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_shared_get_parent,
	.set_parent = clk_rcg2_shared_set_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
/*
 * Like clk_rcg2_shared_ops but rate selection goes through the *_floor_*
 * helpers (presumably choosing the closest supported rate at or below the
 * request — confirm against clk_rcg2_determine_floor_rate).  Note there is
 * no .init here, so these RCGs are not parked at registration.
 */
const struct clk_ops clk_rcg2_shared_floor_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_shared_get_parent,
	.set_parent = clk_rcg2_shared_set_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_shared_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_floor_ops);
1389 static int clk_rcg2_shared_no_init_park(struct clk_hw
*hw
)
1391 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1394 * Read the config register so that the parent is properly mapped at
1395 * registration time.
1397 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &rcg
->parked_cfg
);
/*
 * Like clk_rcg2_shared_ops but skip the init so that the clk frequency is left
 * unchanged at registration time.
 */
const struct clk_ops clk_rcg2_shared_no_init_park_ops = {
	.init = clk_rcg2_shared_no_init_park,
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_shared_get_parent,
	.set_parent = clk_rcg2_shared_set_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_no_init_park_ops);
1419 /* Common APIs to be used for DFS based RCGR */
1420 static void clk_rcg2_dfs_populate_freq(struct clk_hw
*hw
, unsigned int l
,
1423 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1425 unsigned long prate
= 0;
1426 u32 val
, mask
, cfg
, mode
, src
;
1429 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_DFSR(l
), &cfg
);
1431 mask
= BIT(rcg
->hid_width
) - 1;
1434 f
->pre_div
= cfg
& mask
;
1436 src
= cfg
& CFG_SRC_SEL_MASK
;
1437 src
>>= CFG_SRC_SEL_SHIFT
;
1439 num_parents
= clk_hw_get_num_parents(hw
);
1440 for (i
= 0; i
< num_parents
; i
++) {
1441 if (src
== rcg
->parent_map
[i
].cfg
) {
1442 f
->src
= rcg
->parent_map
[i
].src
;
1443 p
= clk_hw_get_parent_by_index(&rcg
->clkr
.hw
, i
);
1444 prate
= clk_hw_get_rate(p
);
1448 mode
= cfg
& CFG_MODE_MASK
;
1449 mode
>>= CFG_MODE_SHIFT
;
1451 mask
= BIT(rcg
->mnd_width
) - 1;
1452 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_M_DFSR(l
),
1457 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_N_DFSR(l
),
1465 f
->freq
= calc_rate(prate
, f
->m
, f
->n
, mode
, f
->pre_div
);
1468 static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2
*rcg
)
1470 struct freq_tbl
*freq_tbl
;
1473 /* Allocate space for 1 extra since table is NULL terminated */
1474 freq_tbl
= kcalloc(MAX_PERF_LEVEL
+ 1, sizeof(*freq_tbl
), GFP_KERNEL
);
1477 rcg
->freq_tbl
= freq_tbl
;
1479 for (i
= 0; i
< MAX_PERF_LEVEL
; i
++)
1480 clk_rcg2_dfs_populate_freq(&rcg
->clkr
.hw
, i
, freq_tbl
+ i
);
1485 static int clk_rcg2_dfs_determine_rate(struct clk_hw
*hw
,
1486 struct clk_rate_request
*req
)
1488 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1491 if (!rcg
->freq_tbl
) {
1492 ret
= clk_rcg2_dfs_populate_freq_table(rcg
);
1494 pr_err("Failed to update DFS tables for %s\n",
1495 clk_hw_get_name(hw
));
1500 return clk_rcg2_determine_rate(hw
, req
);
1503 static unsigned long
1504 clk_rcg2_dfs_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
1506 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1507 u32 level
, mask
, cfg
, m
= 0, n
= 0, mode
, pre_div
;
1509 regmap_read(rcg
->clkr
.regmap
,
1510 rcg
->cmd_rcgr
+ SE_CMD_DFSR_OFFSET
, &level
);
1511 level
&= GENMASK(4, 1);
1515 return rcg
->freq_tbl
[level
].freq
;
1518 * Assume that parent_rate is actually the parent because
1519 * we can't do any better at figuring it out when the table
1520 * hasn't been populated yet. We only populate the table
1521 * in determine_rate because we can't guarantee the parents
1522 * will be registered with the framework until then.
1524 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_DFSR(level
),
1527 mask
= BIT(rcg
->hid_width
) - 1;
1530 pre_div
= cfg
& mask
;
1532 mode
= cfg
& CFG_MODE_MASK
;
1533 mode
>>= CFG_MODE_SHIFT
;
1535 mask
= BIT(rcg
->mnd_width
) - 1;
1536 regmap_read(rcg
->clkr
.regmap
,
1537 rcg
->cmd_rcgr
+ SE_PERF_M_DFSR(level
), &m
);
1540 regmap_read(rcg
->clkr
.regmap
,
1541 rcg
->cmd_rcgr
+ SE_PERF_N_DFSR(level
), &n
);
1547 return calc_rate(parent_rate
, m
, n
, mode
, pre_div
);
/*
 * Ops installed by clk_rcg2_enable_dfs() when hardware DFS is enabled:
 * rate selection is driven by hardware perf levels, so there are no
 * set_rate/set_parent hooks.
 */
static const struct clk_ops clk_rcg2_dfs_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.determine_rate = clk_rcg2_dfs_determine_rate,
	.recalc_rate = clk_rcg2_dfs_recalc_rate,
};
1557 static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data
*data
,
1558 struct regmap
*regmap
)
1560 struct clk_rcg2
*rcg
= data
->rcg
;
1561 struct clk_init_data
*init
= data
->init
;
1565 ret
= regmap_read(regmap
, rcg
->cmd_rcgr
+ SE_CMD_DFSR_OFFSET
, &val
);
1569 if (!(val
& SE_CMD_DFS_EN
))
1573 * Rate changes with consumer writing a register in
1574 * their own I/O region
1576 init
->flags
|= CLK_GET_RATE_NOCACHE
;
1577 init
->ops
= &clk_rcg2_dfs_ops
;
1579 rcg
->freq_tbl
= NULL
;
1584 int qcom_cc_register_rcg_dfs(struct regmap
*regmap
,
1585 const struct clk_rcg_dfs_data
*rcgs
, size_t len
)
1589 for (i
= 0; i
< len
; i
++) {
1590 ret
= clk_rcg2_enable_dfs(&rcgs
[i
], regmap
);
1597 EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs
);
1599 static int clk_rcg2_dp_set_rate(struct clk_hw
*hw
, unsigned long rate
,
1600 unsigned long parent_rate
)
1602 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1603 struct freq_tbl f
= { 0 };
1604 u32 mask
= BIT(rcg
->hid_width
) - 1;
1606 int i
, num_parents
= clk_hw_get_num_parents(hw
);
1607 unsigned long num
, den
;
1609 rational_best_approximation(parent_rate
, rate
,
1610 GENMASK(rcg
->mnd_width
- 1, 0),
1611 GENMASK(rcg
->mnd_width
- 1, 0), &den
, &num
);
1616 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
1618 cfg
&= CFG_SRC_SEL_MASK
;
1619 cfg
>>= CFG_SRC_SEL_SHIFT
;
1621 for (i
= 0; i
< num_parents
; i
++) {
1622 if (cfg
== rcg
->parent_map
[i
].cfg
) {
1623 f
.src
= rcg
->parent_map
[i
].src
;
1628 f
.pre_div
= hid_div
;
1629 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
1640 return clk_rcg2_configure(rcg
, &f
);
/*
 * set_rate_and_parent for the DP pixel RCG.  @index is intentionally unused:
 * clk_rcg2_dp_set_rate() re-reads the currently programmed source select from
 * CFG_REG rather than taking it from the framework.
 */
static int clk_rcg2_dp_set_rate_and_parent(struct clk_hw *hw,
					   unsigned long rate,
					   unsigned long parent_rate, u8 index)
{
	return clk_rcg2_dp_set_rate(hw, rate, parent_rate);
}
1649 static int clk_rcg2_dp_determine_rate(struct clk_hw
*hw
,
1650 struct clk_rate_request
*req
)
1652 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1653 unsigned long num
, den
;
1656 /* Parent rate is a fixed phy link rate */
1657 rational_best_approximation(req
->best_parent_rate
, req
->rate
,
1658 GENMASK(rcg
->mnd_width
- 1, 0),
1659 GENMASK(rcg
->mnd_width
- 1, 0), &den
, &num
);
1664 tmp
= req
->best_parent_rate
* num
;
/*
 * Ops for the DisplayPort pixel RCG: rates are derived from a fixed phy link
 * rate via the M/N counter (see clk_rcg2_dp_set_rate()).
 */
const struct clk_ops clk_dp_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_rcg2_dp_set_rate,
	.set_rate_and_parent = clk_rcg2_dp_set_rate_and_parent,
	.determine_rate = clk_rcg2_dp_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_dp_ops);