1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
6 #include <linux/kernel.h>
7 #include <linux/bitops.h>
10 #include <linux/export.h>
11 #include <linux/clk-provider.h>
12 #include <linux/delay.h>
13 #include <linux/rational.h>
14 #include <linux/regmap.h>
15 #include <linux/math64.h>
16 #include <linux/slab.h>
18 #include <asm/div64.h>
/* CMD register bits: CMD_UPDATE latches a new config, CMD_ROOT_OFF reads state */
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

/* CFG register fields: pre-divider, source select and MND mode */
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)
#define CFG_HW_CLK_CTRL_MASK	BIT(20)

/* Register offsets relative to cmd_rcgr, honouring the per-clock cfg_off */
#define RCG_CFG_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
#define RCG_M_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
#define RCG_N_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
#define RCG_D_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)

/* Dynamic Frequency Scaling */
#define MAX_PERF_LEVEL		8
#define SE_CMD_DFSR_OFFSET	0x14
#define SE_CMD_DFS_EN		BIT(0)
#define SE_PERF_DFSR(level)	(0x1c + 0x4 * (level))
#define SE_PERF_M_DFSR(level)	(0x5c + 0x4 * (level))
#define SE_PERF_N_DFSR(level)	(0x9c + 0x4 * (level))
63 static int clk_rcg2_is_enabled(struct clk_hw
*hw
)
65 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
69 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
, &cmd
);
73 return (cmd
& CMD_ROOT_OFF
) == 0;
76 static u8
clk_rcg2_get_parent(struct clk_hw
*hw
)
78 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
79 int num_parents
= clk_hw_get_num_parents(hw
);
83 ret
= regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
87 cfg
&= CFG_SRC_SEL_MASK
;
88 cfg
>>= CFG_SRC_SEL_SHIFT
;
90 for (i
= 0; i
< num_parents
; i
++)
91 if (cfg
== rcg
->parent_map
[i
].cfg
)
95 pr_debug("%s: Clock %s has invalid parent, using default.\n",
96 __func__
, clk_hw_get_name(hw
));
100 static int update_config(struct clk_rcg2
*rcg
)
104 struct clk_hw
*hw
= &rcg
->clkr
.hw
;
105 const char *name
= clk_hw_get_name(hw
);
107 ret
= regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
108 CMD_UPDATE
, CMD_UPDATE
);
112 /* Wait for update to take effect */
113 for (count
= 500; count
> 0; count
--) {
114 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
, &cmd
);
117 if (!(cmd
& CMD_UPDATE
))
122 WARN(1, "%s: rcg didn't update its configuration.", name
);
126 static int clk_rcg2_set_parent(struct clk_hw
*hw
, u8 index
)
128 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
130 u32 cfg
= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
132 ret
= regmap_update_bits(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
),
133 CFG_SRC_SEL_MASK
, cfg
);
137 return update_config(rcg
);
141 * Calculate m/n:d rate
144 * rate = ----------- x ---
148 calc_rate(unsigned long rate
, u32 m
, u32 n
, u32 mode
, u32 hid_div
)
166 clk_rcg2_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
168 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
169 u32 cfg
, hid_div
, m
= 0, n
= 0, mode
= 0, mask
;
171 regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
173 if (rcg
->mnd_width
) {
174 mask
= BIT(rcg
->mnd_width
) - 1;
175 regmap_read(rcg
->clkr
.regmap
, RCG_M_OFFSET(rcg
), &m
);
177 regmap_read(rcg
->clkr
.regmap
, RCG_N_OFFSET(rcg
), &n
);
181 mode
= cfg
& CFG_MODE_MASK
;
182 mode
>>= CFG_MODE_SHIFT
;
185 mask
= BIT(rcg
->hid_width
) - 1;
186 hid_div
= cfg
>> CFG_SRC_DIV_SHIFT
;
189 return calc_rate(parent_rate
, m
, n
, mode
, hid_div
);
192 static int _freq_tbl_determine_rate(struct clk_hw
*hw
, const struct freq_tbl
*f
,
193 struct clk_rate_request
*req
,
194 enum freq_policy policy
)
196 unsigned long clk_flags
, rate
= req
->rate
;
198 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
203 f
= qcom_find_freq_floor(f
, rate
);
206 f
= qcom_find_freq(f
, rate
);
215 index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
219 clk_flags
= clk_hw_get_flags(hw
);
220 p
= clk_hw_get_parent_by_index(hw
, index
);
224 if (clk_flags
& CLK_SET_RATE_PARENT
) {
230 rate
*= f
->pre_div
+ 1;
240 rate
= clk_hw_get_rate(p
);
242 req
->best_parent_hw
= p
;
243 req
->best_parent_rate
= rate
;
249 static int clk_rcg2_determine_rate(struct clk_hw
*hw
,
250 struct clk_rate_request
*req
)
252 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
254 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
, CEIL
);
257 static int clk_rcg2_determine_floor_rate(struct clk_hw
*hw
,
258 struct clk_rate_request
*req
)
260 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
262 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
, FLOOR
);
265 static int __clk_rcg2_configure(struct clk_rcg2
*rcg
, const struct freq_tbl
*f
)
268 struct clk_hw
*hw
= &rcg
->clkr
.hw
;
269 int ret
, index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
274 if (rcg
->mnd_width
&& f
->n
) {
275 mask
= BIT(rcg
->mnd_width
) - 1;
276 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
277 RCG_M_OFFSET(rcg
), mask
, f
->m
);
281 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
282 RCG_N_OFFSET(rcg
), mask
, ~(f
->n
- f
->m
));
286 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
287 RCG_D_OFFSET(rcg
), mask
, ~f
->n
);
292 mask
= BIT(rcg
->hid_width
) - 1;
293 mask
|= CFG_SRC_SEL_MASK
| CFG_MODE_MASK
| CFG_HW_CLK_CTRL_MASK
;
294 cfg
= f
->pre_div
<< CFG_SRC_DIV_SHIFT
;
295 cfg
|= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
296 if (rcg
->mnd_width
&& f
->n
&& (f
->m
!= f
->n
))
297 cfg
|= CFG_MODE_DUAL_EDGE
;
298 return regmap_update_bits(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
),
/* Program the table entry @f and latch it with the CMD_UPDATE handshake. */
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	int ret;

	ret = __clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return update_config(rcg);
}
313 static int __clk_rcg2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
314 enum freq_policy policy
)
316 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
317 const struct freq_tbl
*f
;
321 f
= qcom_find_freq_floor(rcg
->freq_tbl
, rate
);
324 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
333 return clk_rcg2_configure(rcg
, f
);
336 static int clk_rcg2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
337 unsigned long parent_rate
)
339 return __clk_rcg2_set_rate(hw
, rate
, CEIL
);
342 static int clk_rcg2_set_floor_rate(struct clk_hw
*hw
, unsigned long rate
,
343 unsigned long parent_rate
)
345 return __clk_rcg2_set_rate(hw
, rate
, FLOOR
);
348 static int clk_rcg2_set_rate_and_parent(struct clk_hw
*hw
,
349 unsigned long rate
, unsigned long parent_rate
, u8 index
)
351 return __clk_rcg2_set_rate(hw
, rate
, CEIL
);
354 static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw
*hw
,
355 unsigned long rate
, unsigned long parent_rate
, u8 index
)
357 return __clk_rcg2_set_rate(hw
, rate
, FLOOR
);
360 const struct clk_ops clk_rcg2_ops
= {
361 .is_enabled
= clk_rcg2_is_enabled
,
362 .get_parent
= clk_rcg2_get_parent
,
363 .set_parent
= clk_rcg2_set_parent
,
364 .recalc_rate
= clk_rcg2_recalc_rate
,
365 .determine_rate
= clk_rcg2_determine_rate
,
366 .set_rate
= clk_rcg2_set_rate
,
367 .set_rate_and_parent
= clk_rcg2_set_rate_and_parent
,
369 EXPORT_SYMBOL_GPL(clk_rcg2_ops
);
371 const struct clk_ops clk_rcg2_floor_ops
= {
372 .is_enabled
= clk_rcg2_is_enabled
,
373 .get_parent
= clk_rcg2_get_parent
,
374 .set_parent
= clk_rcg2_set_parent
,
375 .recalc_rate
= clk_rcg2_recalc_rate
,
376 .determine_rate
= clk_rcg2_determine_floor_rate
,
377 .set_rate
= clk_rcg2_set_floor_rate
,
378 .set_rate_and_parent
= clk_rcg2_set_floor_rate_and_parent
,
380 EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops
);
387 static const struct frac_entry frac_table_675m
[] = { /* link rate of 270M */
388 { 52, 295 }, /* 119 M */
389 { 11, 57 }, /* 130.25 M */
390 { 63, 307 }, /* 138.50 M */
391 { 11, 50 }, /* 148.50 M */
392 { 47, 206 }, /* 154 M */
393 { 31, 100 }, /* 205.25 M */
394 { 107, 269 }, /* 268.50 M */
398 static struct frac_entry frac_table_810m
[] = { /* Link rate of 162M */
399 { 31, 211 }, /* 119 M */
400 { 32, 199 }, /* 130.25 M */
401 { 63, 307 }, /* 138.50 M */
402 { 11, 60 }, /* 148.50 M */
403 { 50, 263 }, /* 154 M */
404 { 31, 120 }, /* 205.25 M */
405 { 119, 359 }, /* 268.50 M */
409 static int clk_edp_pixel_set_rate(struct clk_hw
*hw
, unsigned long rate
,
410 unsigned long parent_rate
)
412 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
413 struct freq_tbl f
= *rcg
->freq_tbl
;
414 const struct frac_entry
*frac
;
416 s64 src_rate
= parent_rate
;
418 u32 mask
= BIT(rcg
->hid_width
) - 1;
421 if (src_rate
== 810000000)
422 frac
= frac_table_810m
;
424 frac
= frac_table_675m
;
426 for (; frac
->num
; frac
++) {
428 request
*= frac
->den
;
429 request
= div_s64(request
, frac
->num
);
430 if ((src_rate
< (request
- delta
)) ||
431 (src_rate
> (request
+ delta
)))
434 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
437 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
442 return clk_rcg2_configure(rcg
, &f
);
448 static int clk_edp_pixel_set_rate_and_parent(struct clk_hw
*hw
,
449 unsigned long rate
, unsigned long parent_rate
, u8 index
)
451 /* Parent index is set statically in frequency table */
452 return clk_edp_pixel_set_rate(hw
, rate
, parent_rate
);
455 static int clk_edp_pixel_determine_rate(struct clk_hw
*hw
,
456 struct clk_rate_request
*req
)
458 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
459 const struct freq_tbl
*f
= rcg
->freq_tbl
;
460 const struct frac_entry
*frac
;
463 u32 mask
= BIT(rcg
->hid_width
) - 1;
465 int index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
467 /* Force the correct parent */
468 req
->best_parent_hw
= clk_hw_get_parent_by_index(hw
, index
);
469 req
->best_parent_rate
= clk_hw_get_rate(req
->best_parent_hw
);
471 if (req
->best_parent_rate
== 810000000)
472 frac
= frac_table_810m
;
474 frac
= frac_table_675m
;
476 for (; frac
->num
; frac
++) {
478 request
*= frac
->den
;
479 request
= div_s64(request
, frac
->num
);
480 if ((req
->best_parent_rate
< (request
- delta
)) ||
481 (req
->best_parent_rate
> (request
+ delta
)))
484 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
486 hid_div
>>= CFG_SRC_DIV_SHIFT
;
489 req
->rate
= calc_rate(req
->best_parent_rate
,
490 frac
->num
, frac
->den
,
491 !!frac
->den
, hid_div
);
498 const struct clk_ops clk_edp_pixel_ops
= {
499 .is_enabled
= clk_rcg2_is_enabled
,
500 .get_parent
= clk_rcg2_get_parent
,
501 .set_parent
= clk_rcg2_set_parent
,
502 .recalc_rate
= clk_rcg2_recalc_rate
,
503 .set_rate
= clk_edp_pixel_set_rate
,
504 .set_rate_and_parent
= clk_edp_pixel_set_rate_and_parent
,
505 .determine_rate
= clk_edp_pixel_determine_rate
,
507 EXPORT_SYMBOL_GPL(clk_edp_pixel_ops
);
509 static int clk_byte_determine_rate(struct clk_hw
*hw
,
510 struct clk_rate_request
*req
)
512 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
513 const struct freq_tbl
*f
= rcg
->freq_tbl
;
514 int index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
515 unsigned long parent_rate
, div
;
516 u32 mask
= BIT(rcg
->hid_width
) - 1;
522 req
->best_parent_hw
= p
= clk_hw_get_parent_by_index(hw
, index
);
523 req
->best_parent_rate
= parent_rate
= clk_hw_round_rate(p
, req
->rate
);
525 div
= DIV_ROUND_UP((2 * parent_rate
), req
->rate
) - 1;
526 div
= min_t(u32
, div
, mask
);
528 req
->rate
= calc_rate(parent_rate
, 0, 0, 0, div
);
533 static int clk_byte_set_rate(struct clk_hw
*hw
, unsigned long rate
,
534 unsigned long parent_rate
)
536 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
537 struct freq_tbl f
= *rcg
->freq_tbl
;
539 u32 mask
= BIT(rcg
->hid_width
) - 1;
541 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
542 div
= min_t(u32
, div
, mask
);
546 return clk_rcg2_configure(rcg
, &f
);
549 static int clk_byte_set_rate_and_parent(struct clk_hw
*hw
,
550 unsigned long rate
, unsigned long parent_rate
, u8 index
)
552 /* Parent index is set statically in frequency table */
553 return clk_byte_set_rate(hw
, rate
, parent_rate
);
556 const struct clk_ops clk_byte_ops
= {
557 .is_enabled
= clk_rcg2_is_enabled
,
558 .get_parent
= clk_rcg2_get_parent
,
559 .set_parent
= clk_rcg2_set_parent
,
560 .recalc_rate
= clk_rcg2_recalc_rate
,
561 .set_rate
= clk_byte_set_rate
,
562 .set_rate_and_parent
= clk_byte_set_rate_and_parent
,
563 .determine_rate
= clk_byte_determine_rate
,
565 EXPORT_SYMBOL_GPL(clk_byte_ops
);
567 static int clk_byte2_determine_rate(struct clk_hw
*hw
,
568 struct clk_rate_request
*req
)
570 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
571 unsigned long parent_rate
, div
;
572 u32 mask
= BIT(rcg
->hid_width
) - 1;
574 unsigned long rate
= req
->rate
;
579 p
= req
->best_parent_hw
;
580 req
->best_parent_rate
= parent_rate
= clk_hw_round_rate(p
, rate
);
582 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
583 div
= min_t(u32
, div
, mask
);
585 req
->rate
= calc_rate(parent_rate
, 0, 0, 0, div
);
590 static int clk_byte2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
591 unsigned long parent_rate
)
593 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
594 struct freq_tbl f
= { 0 };
596 int i
, num_parents
= clk_hw_get_num_parents(hw
);
597 u32 mask
= BIT(rcg
->hid_width
) - 1;
600 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
601 div
= min_t(u32
, div
, mask
);
605 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
606 cfg
&= CFG_SRC_SEL_MASK
;
607 cfg
>>= CFG_SRC_SEL_SHIFT
;
609 for (i
= 0; i
< num_parents
; i
++) {
610 if (cfg
== rcg
->parent_map
[i
].cfg
) {
611 f
.src
= rcg
->parent_map
[i
].src
;
612 return clk_rcg2_configure(rcg
, &f
);
619 static int clk_byte2_set_rate_and_parent(struct clk_hw
*hw
,
620 unsigned long rate
, unsigned long parent_rate
, u8 index
)
622 /* Read the hardware to determine parent during set_rate */
623 return clk_byte2_set_rate(hw
, rate
, parent_rate
);
626 const struct clk_ops clk_byte2_ops
= {
627 .is_enabled
= clk_rcg2_is_enabled
,
628 .get_parent
= clk_rcg2_get_parent
,
629 .set_parent
= clk_rcg2_set_parent
,
630 .recalc_rate
= clk_rcg2_recalc_rate
,
631 .set_rate
= clk_byte2_set_rate
,
632 .set_rate_and_parent
= clk_byte2_set_rate_and_parent
,
633 .determine_rate
= clk_byte2_determine_rate
,
635 EXPORT_SYMBOL_GPL(clk_byte2_ops
);
637 static const struct frac_entry frac_table_pixel
[] = {
645 static int clk_pixel_determine_rate(struct clk_hw
*hw
,
646 struct clk_rate_request
*req
)
648 unsigned long request
, src_rate
;
650 const struct frac_entry
*frac
= frac_table_pixel
;
652 for (; frac
->num
; frac
++) {
653 request
= (req
->rate
* frac
->den
) / frac
->num
;
655 src_rate
= clk_hw_round_rate(req
->best_parent_hw
, request
);
656 if ((src_rate
< (request
- delta
)) ||
657 (src_rate
> (request
+ delta
)))
660 req
->best_parent_rate
= src_rate
;
661 req
->rate
= (src_rate
* frac
->num
) / frac
->den
;
668 static int clk_pixel_set_rate(struct clk_hw
*hw
, unsigned long rate
,
669 unsigned long parent_rate
)
671 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
672 struct freq_tbl f
= { 0 };
673 const struct frac_entry
*frac
= frac_table_pixel
;
674 unsigned long request
;
676 u32 mask
= BIT(rcg
->hid_width
) - 1;
678 int i
, num_parents
= clk_hw_get_num_parents(hw
);
680 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
681 cfg
&= CFG_SRC_SEL_MASK
;
682 cfg
>>= CFG_SRC_SEL_SHIFT
;
684 for (i
= 0; i
< num_parents
; i
++)
685 if (cfg
== rcg
->parent_map
[i
].cfg
) {
686 f
.src
= rcg
->parent_map
[i
].src
;
690 for (; frac
->num
; frac
++) {
691 request
= (rate
* frac
->den
) / frac
->num
;
693 if ((parent_rate
< (request
- delta
)) ||
694 (parent_rate
> (request
+ delta
)))
697 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
700 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
705 return clk_rcg2_configure(rcg
, &f
);
710 static int clk_pixel_set_rate_and_parent(struct clk_hw
*hw
, unsigned long rate
,
711 unsigned long parent_rate
, u8 index
)
713 return clk_pixel_set_rate(hw
, rate
, parent_rate
);
716 const struct clk_ops clk_pixel_ops
= {
717 .is_enabled
= clk_rcg2_is_enabled
,
718 .get_parent
= clk_rcg2_get_parent
,
719 .set_parent
= clk_rcg2_set_parent
,
720 .recalc_rate
= clk_rcg2_recalc_rate
,
721 .set_rate
= clk_pixel_set_rate
,
722 .set_rate_and_parent
= clk_pixel_set_rate_and_parent
,
723 .determine_rate
= clk_pixel_determine_rate
,
725 EXPORT_SYMBOL_GPL(clk_pixel_ops
);
727 static int clk_gfx3d_determine_rate(struct clk_hw
*hw
,
728 struct clk_rate_request
*req
)
730 struct clk_rate_request parent_req
= { };
731 struct clk_hw
*p2
, *p8
, *p9
, *xo
;
732 unsigned long p9_rate
;
735 xo
= clk_hw_get_parent_by_index(hw
, 0);
736 if (req
->rate
== clk_hw_get_rate(xo
)) {
737 req
->best_parent_hw
= xo
;
741 p9
= clk_hw_get_parent_by_index(hw
, 2);
742 p2
= clk_hw_get_parent_by_index(hw
, 3);
743 p8
= clk_hw_get_parent_by_index(hw
, 4);
745 /* PLL9 is a fixed rate PLL */
746 p9_rate
= clk_hw_get_rate(p9
);
748 parent_req
.rate
= req
->rate
= min(req
->rate
, p9_rate
);
749 if (req
->rate
== p9_rate
) {
750 req
->rate
= req
->best_parent_rate
= p9_rate
;
751 req
->best_parent_hw
= p9
;
755 if (req
->best_parent_hw
== p9
) {
756 /* Are we going back to a previously used rate? */
757 if (clk_hw_get_rate(p8
) == req
->rate
)
758 req
->best_parent_hw
= p8
;
760 req
->best_parent_hw
= p2
;
761 } else if (req
->best_parent_hw
== p8
) {
762 req
->best_parent_hw
= p2
;
764 req
->best_parent_hw
= p8
;
767 ret
= __clk_determine_rate(req
->best_parent_hw
, &parent_req
);
771 req
->rate
= req
->best_parent_rate
= parent_req
.rate
;
776 static int clk_gfx3d_set_rate_and_parent(struct clk_hw
*hw
, unsigned long rate
,
777 unsigned long parent_rate
, u8 index
)
779 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
783 /* Just mux it, we don't use the division or m/n hardware */
784 cfg
= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
785 ret
= regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, cfg
);
789 return update_config(rcg
);
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
803 const struct clk_ops clk_gfx3d_ops
= {
804 .is_enabled
= clk_rcg2_is_enabled
,
805 .get_parent
= clk_rcg2_get_parent
,
806 .set_parent
= clk_rcg2_set_parent
,
807 .recalc_rate
= clk_rcg2_recalc_rate
,
808 .set_rate
= clk_gfx3d_set_rate
,
809 .set_rate_and_parent
= clk_gfx3d_set_rate_and_parent
,
810 .determine_rate
= clk_gfx3d_determine_rate
,
812 EXPORT_SYMBOL_GPL(clk_gfx3d_ops
);
814 static int clk_rcg2_set_force_enable(struct clk_hw
*hw
)
816 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
817 const char *name
= clk_hw_get_name(hw
);
820 ret
= regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
821 CMD_ROOT_EN
, CMD_ROOT_EN
);
825 /* wait for RCG to turn ON */
826 for (count
= 500; count
> 0; count
--) {
827 if (clk_rcg2_is_enabled(hw
))
833 pr_err("%s: RCG did not turn on\n", name
);
837 static int clk_rcg2_clear_force_enable(struct clk_hw
*hw
)
839 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
841 return regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
/* Reconfigure a shared RCG while it is forcibly enabled, then release it. */
static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}
862 static int clk_rcg2_shared_set_rate(struct clk_hw
*hw
, unsigned long rate
,
863 unsigned long parent_rate
)
865 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
866 const struct freq_tbl
*f
;
868 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
873 * In case clock is disabled, update the CFG, M, N and D registers
874 * and don't hit the update bit of CMD register.
876 if (!__clk_is_enabled(hw
->clk
))
877 return __clk_rcg2_configure(rcg
, f
);
879 return clk_rcg2_shared_force_enable_clear(hw
, f
);
882 static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw
*hw
,
883 unsigned long rate
, unsigned long parent_rate
, u8 index
)
885 return clk_rcg2_shared_set_rate(hw
, rate
, parent_rate
);
/* Enable a shared RCG, latching the configuration staged by set_rate(). */
static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	/*
	 * Set the update bit because required configuration has already
	 * been written in clk_rcg2_shared_set_rate()
	 */
	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = update_config(rcg);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}
908 static void clk_rcg2_shared_disable(struct clk_hw
*hw
)
910 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
914 * Store current configuration as switching to safe source would clear
915 * the SRC and DIV of CFG register
917 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
920 * Park the RCG at a safe configuration - sourced off of safe source.
921 * Force enable and disable the RCG while configuring it to safeguard
922 * against any update signal coming from the downstream clock.
923 * The current parent is still prepared and enabled at this point, and
924 * the safe source is always on while application processor subsystem
925 * is online. Therefore, the RCG can safely switch its parent.
927 clk_rcg2_set_force_enable(hw
);
929 regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
930 rcg
->safe_src_index
<< CFG_SRC_SEL_SHIFT
);
934 clk_rcg2_clear_force_enable(hw
);
936 /* Write back the stored configuration corresponding to current rate */
937 regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, cfg
);
940 const struct clk_ops clk_rcg2_shared_ops
= {
941 .enable
= clk_rcg2_shared_enable
,
942 .disable
= clk_rcg2_shared_disable
,
943 .get_parent
= clk_rcg2_get_parent
,
944 .set_parent
= clk_rcg2_set_parent
,
945 .recalc_rate
= clk_rcg2_recalc_rate
,
946 .determine_rate
= clk_rcg2_determine_rate
,
947 .set_rate
= clk_rcg2_shared_set_rate
,
948 .set_rate_and_parent
= clk_rcg2_shared_set_rate_and_parent
,
950 EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops
);
952 /* Common APIs to be used for DFS based RCGR */
953 static void clk_rcg2_dfs_populate_freq(struct clk_hw
*hw
, unsigned int l
,
956 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
958 unsigned long prate
= 0;
959 u32 val
, mask
, cfg
, mode
, src
;
962 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_DFSR(l
), &cfg
);
964 mask
= BIT(rcg
->hid_width
) - 1;
967 f
->pre_div
= cfg
& mask
;
969 src
= cfg
& CFG_SRC_SEL_MASK
;
970 src
>>= CFG_SRC_SEL_SHIFT
;
972 num_parents
= clk_hw_get_num_parents(hw
);
973 for (i
= 0; i
< num_parents
; i
++) {
974 if (src
== rcg
->parent_map
[i
].cfg
) {
975 f
->src
= rcg
->parent_map
[i
].src
;
976 p
= clk_hw_get_parent_by_index(&rcg
->clkr
.hw
, i
);
977 prate
= clk_hw_get_rate(p
);
981 mode
= cfg
& CFG_MODE_MASK
;
982 mode
>>= CFG_MODE_SHIFT
;
984 mask
= BIT(rcg
->mnd_width
) - 1;
985 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_M_DFSR(l
),
990 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_N_DFSR(l
),
998 f
->freq
= calc_rate(prate
, f
->m
, f
->n
, mode
, f
->pre_div
);
1001 static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2
*rcg
)
1003 struct freq_tbl
*freq_tbl
;
1006 /* Allocate space for 1 extra since table is NULL terminated */
1007 freq_tbl
= kcalloc(MAX_PERF_LEVEL
+ 1, sizeof(*freq_tbl
), GFP_KERNEL
);
1010 rcg
->freq_tbl
= freq_tbl
;
1012 for (i
= 0; i
< MAX_PERF_LEVEL
; i
++)
1013 clk_rcg2_dfs_populate_freq(&rcg
->clkr
.hw
, i
, freq_tbl
+ i
);
1018 static int clk_rcg2_dfs_determine_rate(struct clk_hw
*hw
,
1019 struct clk_rate_request
*req
)
1021 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1024 if (!rcg
->freq_tbl
) {
1025 ret
= clk_rcg2_dfs_populate_freq_table(rcg
);
1027 pr_err("Failed to update DFS tables for %s\n",
1028 clk_hw_get_name(hw
));
1033 return clk_rcg2_determine_rate(hw
, req
);
1036 static unsigned long
1037 clk_rcg2_dfs_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
1039 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1040 u32 level
, mask
, cfg
, m
= 0, n
= 0, mode
, pre_div
;
1042 regmap_read(rcg
->clkr
.regmap
,
1043 rcg
->cmd_rcgr
+ SE_CMD_DFSR_OFFSET
, &level
);
1044 level
&= GENMASK(4, 1);
1048 return rcg
->freq_tbl
[level
].freq
;
1051 * Assume that parent_rate is actually the parent because
1052 * we can't do any better at figuring it out when the table
1053 * hasn't been populated yet. We only populate the table
1054 * in determine_rate because we can't guarantee the parents
1055 * will be registered with the framework until then.
1057 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_DFSR(level
),
1060 mask
= BIT(rcg
->hid_width
) - 1;
1063 pre_div
= cfg
& mask
;
1065 mode
= cfg
& CFG_MODE_MASK
;
1066 mode
>>= CFG_MODE_SHIFT
;
1068 mask
= BIT(rcg
->mnd_width
) - 1;
1069 regmap_read(rcg
->clkr
.regmap
,
1070 rcg
->cmd_rcgr
+ SE_PERF_M_DFSR(level
), &m
);
1073 regmap_read(rcg
->clkr
.regmap
,
1074 rcg
->cmd_rcgr
+ SE_PERF_N_DFSR(level
), &n
);
1080 return calc_rate(parent_rate
, m
, n
, mode
, pre_div
);
1083 static const struct clk_ops clk_rcg2_dfs_ops
= {
1084 .is_enabled
= clk_rcg2_is_enabled
,
1085 .get_parent
= clk_rcg2_get_parent
,
1086 .determine_rate
= clk_rcg2_dfs_determine_rate
,
1087 .recalc_rate
= clk_rcg2_dfs_recalc_rate
,
1090 static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data
*data
,
1091 struct regmap
*regmap
)
1093 struct clk_rcg2
*rcg
= data
->rcg
;
1094 struct clk_init_data
*init
= data
->init
;
1098 ret
= regmap_read(regmap
, rcg
->cmd_rcgr
+ SE_CMD_DFSR_OFFSET
, &val
);
1102 if (!(val
& SE_CMD_DFS_EN
))
1106 * Rate changes with consumer writing a register in
1107 * their own I/O region
1109 init
->flags
|= CLK_GET_RATE_NOCACHE
;
1110 init
->ops
= &clk_rcg2_dfs_ops
;
1112 rcg
->freq_tbl
= NULL
;
1117 int qcom_cc_register_rcg_dfs(struct regmap
*regmap
,
1118 const struct clk_rcg_dfs_data
*rcgs
, size_t len
)
1122 for (i
= 0; i
< len
; i
++) {
1123 ret
= clk_rcg2_enable_dfs(&rcgs
[i
], regmap
);
1130 EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs
);
1132 static int clk_rcg2_dp_set_rate(struct clk_hw
*hw
, unsigned long rate
,
1133 unsigned long parent_rate
)
1135 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1136 struct freq_tbl f
= { 0 };
1137 u32 mask
= BIT(rcg
->hid_width
) - 1;
1139 int i
, num_parents
= clk_hw_get_num_parents(hw
);
1140 unsigned long num
, den
;
1142 rational_best_approximation(parent_rate
, rate
,
1143 GENMASK(rcg
->mnd_width
- 1, 0),
1144 GENMASK(rcg
->mnd_width
- 1, 0), &den
, &num
);
1149 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
1151 cfg
&= CFG_SRC_SEL_MASK
;
1152 cfg
>>= CFG_SRC_SEL_SHIFT
;
1154 for (i
= 0; i
< num_parents
; i
++) {
1155 if (cfg
== rcg
->parent_map
[i
].cfg
) {
1156 f
.src
= rcg
->parent_map
[i
].src
;
1161 f
.pre_div
= hid_div
;
1162 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
1173 return clk_rcg2_configure(rcg
, &f
);
1176 static int clk_rcg2_dp_set_rate_and_parent(struct clk_hw
*hw
,
1177 unsigned long rate
, unsigned long parent_rate
, u8 index
)
1179 return clk_rcg2_dp_set_rate(hw
, rate
, parent_rate
);
1182 static int clk_rcg2_dp_determine_rate(struct clk_hw
*hw
,
1183 struct clk_rate_request
*req
)
1185 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1186 unsigned long num
, den
;
1189 /* Parent rate is a fixed phy link rate */
1190 rational_best_approximation(req
->best_parent_rate
, req
->rate
,
1191 GENMASK(rcg
->mnd_width
- 1, 0),
1192 GENMASK(rcg
->mnd_width
- 1, 0), &den
, &num
);
1197 tmp
= req
->best_parent_rate
* num
;
1204 const struct clk_ops clk_dp_ops
= {
1205 .is_enabled
= clk_rcg2_is_enabled
,
1206 .get_parent
= clk_rcg2_get_parent
,
1207 .set_parent
= clk_rcg2_set_parent
,
1208 .recalc_rate
= clk_rcg2_recalc_rate
,
1209 .set_rate
= clk_rcg2_dp_set_rate
,
1210 .set_rate_and_parent
= clk_rcg2_dp_set_rate_and_parent
,
1211 .determine_rate
= clk_rcg2_dp_determine_rate
,
1213 EXPORT_SYMBOL_GPL(clk_dp_ops
);