1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
6 #include <linux/kernel.h>
7 #include <linux/bitops.h>
10 #include <linux/export.h>
11 #include <linux/clk-provider.h>
12 #include <linux/delay.h>
13 #include <linux/regmap.h>
14 #include <linux/math64.h>
15 #include <linux/slab.h>
17 #include <asm/div64.h>
23 #define CMD_UPDATE BIT(0)
24 #define CMD_ROOT_EN BIT(1)
25 #define CMD_DIRTY_CFG BIT(4)
26 #define CMD_DIRTY_N BIT(5)
27 #define CMD_DIRTY_M BIT(6)
28 #define CMD_DIRTY_D BIT(7)
29 #define CMD_ROOT_OFF BIT(31)
32 #define CFG_SRC_DIV_SHIFT 0
33 #define CFG_SRC_SEL_SHIFT 8
34 #define CFG_SRC_SEL_MASK (0x7 << CFG_SRC_SEL_SHIFT)
35 #define CFG_MODE_SHIFT 12
36 #define CFG_MODE_MASK (0x3 << CFG_MODE_SHIFT)
37 #define CFG_MODE_DUAL_EDGE (0x2 << CFG_MODE_SHIFT)
38 #define CFG_HW_CLK_CTRL_MASK BIT(20)
44 #define RCG_CFG_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
45 #define RCG_M_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
46 #define RCG_N_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
47 #define RCG_D_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)
49 /* Dynamic Frequency Scaling */
50 #define MAX_PERF_LEVEL 8
51 #define SE_CMD_DFSR_OFFSET 0x14
52 #define SE_CMD_DFS_EN BIT(0)
53 #define SE_PERF_DFSR(level) (0x1c + 0x4 * (level))
54 #define SE_PERF_M_DFSR(level) (0x5c + 0x4 * (level))
55 #define SE_PERF_N_DFSR(level) (0x9c + 0x4 * (level))
62 static int clk_rcg2_is_enabled(struct clk_hw
*hw
)
64 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
68 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
, &cmd
);
72 return (cmd
& CMD_ROOT_OFF
) == 0;
75 static u8
clk_rcg2_get_parent(struct clk_hw
*hw
)
77 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
78 int num_parents
= clk_hw_get_num_parents(hw
);
82 ret
= regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
86 cfg
&= CFG_SRC_SEL_MASK
;
87 cfg
>>= CFG_SRC_SEL_SHIFT
;
89 for (i
= 0; i
< num_parents
; i
++)
90 if (cfg
== rcg
->parent_map
[i
].cfg
)
94 pr_debug("%s: Clock %s has invalid parent, using default.\n",
95 __func__
, clk_hw_get_name(hw
));
99 static int update_config(struct clk_rcg2
*rcg
)
103 struct clk_hw
*hw
= &rcg
->clkr
.hw
;
104 const char *name
= clk_hw_get_name(hw
);
106 ret
= regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
107 CMD_UPDATE
, CMD_UPDATE
);
111 /* Wait for update to take effect */
112 for (count
= 500; count
> 0; count
--) {
113 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
, &cmd
);
116 if (!(cmd
& CMD_UPDATE
))
121 WARN(1, "%s: rcg didn't update its configuration.", name
);
125 static int clk_rcg2_set_parent(struct clk_hw
*hw
, u8 index
)
127 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
129 u32 cfg
= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
131 ret
= regmap_update_bits(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
),
132 CFG_SRC_SEL_MASK
, cfg
);
136 return update_config(rcg
);
140 * Calculate m/n:d rate
143 * rate = ----------- x ---
147 calc_rate(unsigned long rate
, u32 m
, u32 n
, u32 mode
, u32 hid_div
)
165 clk_rcg2_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
167 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
168 u32 cfg
, hid_div
, m
= 0, n
= 0, mode
= 0, mask
;
170 regmap_read(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
), &cfg
);
172 if (rcg
->mnd_width
) {
173 mask
= BIT(rcg
->mnd_width
) - 1;
174 regmap_read(rcg
->clkr
.regmap
, RCG_M_OFFSET(rcg
), &m
);
176 regmap_read(rcg
->clkr
.regmap
, RCG_N_OFFSET(rcg
), &n
);
180 mode
= cfg
& CFG_MODE_MASK
;
181 mode
>>= CFG_MODE_SHIFT
;
184 mask
= BIT(rcg
->hid_width
) - 1;
185 hid_div
= cfg
>> CFG_SRC_DIV_SHIFT
;
188 return calc_rate(parent_rate
, m
, n
, mode
, hid_div
);
191 static int _freq_tbl_determine_rate(struct clk_hw
*hw
, const struct freq_tbl
*f
,
192 struct clk_rate_request
*req
,
193 enum freq_policy policy
)
195 unsigned long clk_flags
, rate
= req
->rate
;
197 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
202 f
= qcom_find_freq_floor(f
, rate
);
205 f
= qcom_find_freq(f
, rate
);
214 index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
218 clk_flags
= clk_hw_get_flags(hw
);
219 p
= clk_hw_get_parent_by_index(hw
, index
);
220 if (clk_flags
& CLK_SET_RATE_PARENT
) {
226 rate
*= f
->pre_div
+ 1;
236 rate
= clk_hw_get_rate(p
);
238 req
->best_parent_hw
= p
;
239 req
->best_parent_rate
= rate
;
245 static int clk_rcg2_determine_rate(struct clk_hw
*hw
,
246 struct clk_rate_request
*req
)
248 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
250 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
, CEIL
);
253 static int clk_rcg2_determine_floor_rate(struct clk_hw
*hw
,
254 struct clk_rate_request
*req
)
256 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
258 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
, FLOOR
);
261 static int __clk_rcg2_configure(struct clk_rcg2
*rcg
, const struct freq_tbl
*f
)
264 struct clk_hw
*hw
= &rcg
->clkr
.hw
;
265 int ret
, index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
270 if (rcg
->mnd_width
&& f
->n
) {
271 mask
= BIT(rcg
->mnd_width
) - 1;
272 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
273 RCG_M_OFFSET(rcg
), mask
, f
->m
);
277 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
278 RCG_N_OFFSET(rcg
), mask
, ~(f
->n
- f
->m
));
282 ret
= regmap_update_bits(rcg
->clkr
.regmap
,
283 RCG_D_OFFSET(rcg
), mask
, ~f
->n
);
288 mask
= BIT(rcg
->hid_width
) - 1;
289 mask
|= CFG_SRC_SEL_MASK
| CFG_MODE_MASK
| CFG_HW_CLK_CTRL_MASK
;
290 cfg
= f
->pre_div
<< CFG_SRC_DIV_SHIFT
;
291 cfg
|= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
292 if (rcg
->mnd_width
&& f
->n
&& (f
->m
!= f
->n
))
293 cfg
|= CFG_MODE_DUAL_EDGE
;
294 return regmap_update_bits(rcg
->clkr
.regmap
, RCG_CFG_OFFSET(rcg
),
/* Write a frequency-table configuration and latch it via CMD_UPDATE. */
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	int ret;

	ret = __clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return update_config(rcg);
}
309 static int __clk_rcg2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
310 enum freq_policy policy
)
312 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
313 const struct freq_tbl
*f
;
317 f
= qcom_find_freq_floor(rcg
->freq_tbl
, rate
);
320 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
329 return clk_rcg2_configure(rcg
, f
);
332 static int clk_rcg2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
333 unsigned long parent_rate
)
335 return __clk_rcg2_set_rate(hw
, rate
, CEIL
);
338 static int clk_rcg2_set_floor_rate(struct clk_hw
*hw
, unsigned long rate
,
339 unsigned long parent_rate
)
341 return __clk_rcg2_set_rate(hw
, rate
, FLOOR
);
344 static int clk_rcg2_set_rate_and_parent(struct clk_hw
*hw
,
345 unsigned long rate
, unsigned long parent_rate
, u8 index
)
347 return __clk_rcg2_set_rate(hw
, rate
, CEIL
);
350 static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw
*hw
,
351 unsigned long rate
, unsigned long parent_rate
, u8 index
)
353 return __clk_rcg2_set_rate(hw
, rate
, FLOOR
);
356 const struct clk_ops clk_rcg2_ops
= {
357 .is_enabled
= clk_rcg2_is_enabled
,
358 .get_parent
= clk_rcg2_get_parent
,
359 .set_parent
= clk_rcg2_set_parent
,
360 .recalc_rate
= clk_rcg2_recalc_rate
,
361 .determine_rate
= clk_rcg2_determine_rate
,
362 .set_rate
= clk_rcg2_set_rate
,
363 .set_rate_and_parent
= clk_rcg2_set_rate_and_parent
,
365 EXPORT_SYMBOL_GPL(clk_rcg2_ops
);
367 const struct clk_ops clk_rcg2_floor_ops
= {
368 .is_enabled
= clk_rcg2_is_enabled
,
369 .get_parent
= clk_rcg2_get_parent
,
370 .set_parent
= clk_rcg2_set_parent
,
371 .recalc_rate
= clk_rcg2_recalc_rate
,
372 .determine_rate
= clk_rcg2_determine_floor_rate
,
373 .set_rate
= clk_rcg2_set_floor_rate
,
374 .set_rate_and_parent
= clk_rcg2_set_floor_rate_and_parent
,
376 EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops
);
383 static const struct frac_entry frac_table_675m
[] = { /* link rate of 270M */
384 { 52, 295 }, /* 119 M */
385 { 11, 57 }, /* 130.25 M */
386 { 63, 307 }, /* 138.50 M */
387 { 11, 50 }, /* 148.50 M */
388 { 47, 206 }, /* 154 M */
389 { 31, 100 }, /* 205.25 M */
390 { 107, 269 }, /* 268.50 M */
394 static struct frac_entry frac_table_810m
[] = { /* Link rate of 162M */
395 { 31, 211 }, /* 119 M */
396 { 32, 199 }, /* 130.25 M */
397 { 63, 307 }, /* 138.50 M */
398 { 11, 60 }, /* 148.50 M */
399 { 50, 263 }, /* 154 M */
400 { 31, 120 }, /* 205.25 M */
401 { 119, 359 }, /* 268.50 M */
405 static int clk_edp_pixel_set_rate(struct clk_hw
*hw
, unsigned long rate
,
406 unsigned long parent_rate
)
408 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
409 struct freq_tbl f
= *rcg
->freq_tbl
;
410 const struct frac_entry
*frac
;
412 s64 src_rate
= parent_rate
;
414 u32 mask
= BIT(rcg
->hid_width
) - 1;
417 if (src_rate
== 810000000)
418 frac
= frac_table_810m
;
420 frac
= frac_table_675m
;
422 for (; frac
->num
; frac
++) {
424 request
*= frac
->den
;
425 request
= div_s64(request
, frac
->num
);
426 if ((src_rate
< (request
- delta
)) ||
427 (src_rate
> (request
+ delta
)))
430 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
433 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
438 return clk_rcg2_configure(rcg
, &f
);
444 static int clk_edp_pixel_set_rate_and_parent(struct clk_hw
*hw
,
445 unsigned long rate
, unsigned long parent_rate
, u8 index
)
447 /* Parent index is set statically in frequency table */
448 return clk_edp_pixel_set_rate(hw
, rate
, parent_rate
);
451 static int clk_edp_pixel_determine_rate(struct clk_hw
*hw
,
452 struct clk_rate_request
*req
)
454 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
455 const struct freq_tbl
*f
= rcg
->freq_tbl
;
456 const struct frac_entry
*frac
;
459 u32 mask
= BIT(rcg
->hid_width
) - 1;
461 int index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
463 /* Force the correct parent */
464 req
->best_parent_hw
= clk_hw_get_parent_by_index(hw
, index
);
465 req
->best_parent_rate
= clk_hw_get_rate(req
->best_parent_hw
);
467 if (req
->best_parent_rate
== 810000000)
468 frac
= frac_table_810m
;
470 frac
= frac_table_675m
;
472 for (; frac
->num
; frac
++) {
474 request
*= frac
->den
;
475 request
= div_s64(request
, frac
->num
);
476 if ((req
->best_parent_rate
< (request
- delta
)) ||
477 (req
->best_parent_rate
> (request
+ delta
)))
480 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
482 hid_div
>>= CFG_SRC_DIV_SHIFT
;
485 req
->rate
= calc_rate(req
->best_parent_rate
,
486 frac
->num
, frac
->den
,
487 !!frac
->den
, hid_div
);
494 const struct clk_ops clk_edp_pixel_ops
= {
495 .is_enabled
= clk_rcg2_is_enabled
,
496 .get_parent
= clk_rcg2_get_parent
,
497 .set_parent
= clk_rcg2_set_parent
,
498 .recalc_rate
= clk_rcg2_recalc_rate
,
499 .set_rate
= clk_edp_pixel_set_rate
,
500 .set_rate_and_parent
= clk_edp_pixel_set_rate_and_parent
,
501 .determine_rate
= clk_edp_pixel_determine_rate
,
503 EXPORT_SYMBOL_GPL(clk_edp_pixel_ops
);
505 static int clk_byte_determine_rate(struct clk_hw
*hw
,
506 struct clk_rate_request
*req
)
508 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
509 const struct freq_tbl
*f
= rcg
->freq_tbl
;
510 int index
= qcom_find_src_index(hw
, rcg
->parent_map
, f
->src
);
511 unsigned long parent_rate
, div
;
512 u32 mask
= BIT(rcg
->hid_width
) - 1;
518 req
->best_parent_hw
= p
= clk_hw_get_parent_by_index(hw
, index
);
519 req
->best_parent_rate
= parent_rate
= clk_hw_round_rate(p
, req
->rate
);
521 div
= DIV_ROUND_UP((2 * parent_rate
), req
->rate
) - 1;
522 div
= min_t(u32
, div
, mask
);
524 req
->rate
= calc_rate(parent_rate
, 0, 0, 0, div
);
529 static int clk_byte_set_rate(struct clk_hw
*hw
, unsigned long rate
,
530 unsigned long parent_rate
)
532 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
533 struct freq_tbl f
= *rcg
->freq_tbl
;
535 u32 mask
= BIT(rcg
->hid_width
) - 1;
537 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
538 div
= min_t(u32
, div
, mask
);
542 return clk_rcg2_configure(rcg
, &f
);
545 static int clk_byte_set_rate_and_parent(struct clk_hw
*hw
,
546 unsigned long rate
, unsigned long parent_rate
, u8 index
)
548 /* Parent index is set statically in frequency table */
549 return clk_byte_set_rate(hw
, rate
, parent_rate
);
552 const struct clk_ops clk_byte_ops
= {
553 .is_enabled
= clk_rcg2_is_enabled
,
554 .get_parent
= clk_rcg2_get_parent
,
555 .set_parent
= clk_rcg2_set_parent
,
556 .recalc_rate
= clk_rcg2_recalc_rate
,
557 .set_rate
= clk_byte_set_rate
,
558 .set_rate_and_parent
= clk_byte_set_rate_and_parent
,
559 .determine_rate
= clk_byte_determine_rate
,
561 EXPORT_SYMBOL_GPL(clk_byte_ops
);
563 static int clk_byte2_determine_rate(struct clk_hw
*hw
,
564 struct clk_rate_request
*req
)
566 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
567 unsigned long parent_rate
, div
;
568 u32 mask
= BIT(rcg
->hid_width
) - 1;
570 unsigned long rate
= req
->rate
;
575 p
= req
->best_parent_hw
;
576 req
->best_parent_rate
= parent_rate
= clk_hw_round_rate(p
, rate
);
578 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
579 div
= min_t(u32
, div
, mask
);
581 req
->rate
= calc_rate(parent_rate
, 0, 0, 0, div
);
586 static int clk_byte2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
587 unsigned long parent_rate
)
589 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
590 struct freq_tbl f
= { 0 };
592 int i
, num_parents
= clk_hw_get_num_parents(hw
);
593 u32 mask
= BIT(rcg
->hid_width
) - 1;
596 div
= DIV_ROUND_UP((2 * parent_rate
), rate
) - 1;
597 div
= min_t(u32
, div
, mask
);
601 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
602 cfg
&= CFG_SRC_SEL_MASK
;
603 cfg
>>= CFG_SRC_SEL_SHIFT
;
605 for (i
= 0; i
< num_parents
; i
++) {
606 if (cfg
== rcg
->parent_map
[i
].cfg
) {
607 f
.src
= rcg
->parent_map
[i
].src
;
608 return clk_rcg2_configure(rcg
, &f
);
615 static int clk_byte2_set_rate_and_parent(struct clk_hw
*hw
,
616 unsigned long rate
, unsigned long parent_rate
, u8 index
)
618 /* Read the hardware to determine parent during set_rate */
619 return clk_byte2_set_rate(hw
, rate
, parent_rate
);
622 const struct clk_ops clk_byte2_ops
= {
623 .is_enabled
= clk_rcg2_is_enabled
,
624 .get_parent
= clk_rcg2_get_parent
,
625 .set_parent
= clk_rcg2_set_parent
,
626 .recalc_rate
= clk_rcg2_recalc_rate
,
627 .set_rate
= clk_byte2_set_rate
,
628 .set_rate_and_parent
= clk_byte2_set_rate_and_parent
,
629 .determine_rate
= clk_byte2_determine_rate
,
631 EXPORT_SYMBOL_GPL(clk_byte2_ops
);
/*
 * Pixel clock m/n fraction candidates, iterated until a zero ->num entry.
 * NOTE(review): the table entries are not visible in this chunk of the file;
 * confirm the table ends with a zero terminator as required by the
 * "for (; frac->num; frac++)" loops below.
 */
static const struct frac_entry frac_table_pixel
[] = {
641 static int clk_pixel_determine_rate(struct clk_hw
*hw
,
642 struct clk_rate_request
*req
)
644 unsigned long request
, src_rate
;
646 const struct frac_entry
*frac
= frac_table_pixel
;
648 for (; frac
->num
; frac
++) {
649 request
= (req
->rate
* frac
->den
) / frac
->num
;
651 src_rate
= clk_hw_round_rate(req
->best_parent_hw
, request
);
652 if ((src_rate
< (request
- delta
)) ||
653 (src_rate
> (request
+ delta
)))
656 req
->best_parent_rate
= src_rate
;
657 req
->rate
= (src_rate
* frac
->num
) / frac
->den
;
664 static int clk_pixel_set_rate(struct clk_hw
*hw
, unsigned long rate
,
665 unsigned long parent_rate
)
667 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
668 struct freq_tbl f
= { 0 };
669 const struct frac_entry
*frac
= frac_table_pixel
;
670 unsigned long request
;
672 u32 mask
= BIT(rcg
->hid_width
) - 1;
674 int i
, num_parents
= clk_hw_get_num_parents(hw
);
676 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
677 cfg
&= CFG_SRC_SEL_MASK
;
678 cfg
>>= CFG_SRC_SEL_SHIFT
;
680 for (i
= 0; i
< num_parents
; i
++)
681 if (cfg
== rcg
->parent_map
[i
].cfg
) {
682 f
.src
= rcg
->parent_map
[i
].src
;
686 for (; frac
->num
; frac
++) {
687 request
= (rate
* frac
->den
) / frac
->num
;
689 if ((parent_rate
< (request
- delta
)) ||
690 (parent_rate
> (request
+ delta
)))
693 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
696 f
.pre_div
>>= CFG_SRC_DIV_SHIFT
;
701 return clk_rcg2_configure(rcg
, &f
);
706 static int clk_pixel_set_rate_and_parent(struct clk_hw
*hw
, unsigned long rate
,
707 unsigned long parent_rate
, u8 index
)
709 return clk_pixel_set_rate(hw
, rate
, parent_rate
);
712 const struct clk_ops clk_pixel_ops
= {
713 .is_enabled
= clk_rcg2_is_enabled
,
714 .get_parent
= clk_rcg2_get_parent
,
715 .set_parent
= clk_rcg2_set_parent
,
716 .recalc_rate
= clk_rcg2_recalc_rate
,
717 .set_rate
= clk_pixel_set_rate
,
718 .set_rate_and_parent
= clk_pixel_set_rate_and_parent
,
719 .determine_rate
= clk_pixel_determine_rate
,
721 EXPORT_SYMBOL_GPL(clk_pixel_ops
);
723 static int clk_gfx3d_determine_rate(struct clk_hw
*hw
,
724 struct clk_rate_request
*req
)
726 struct clk_rate_request parent_req
= { };
727 struct clk_hw
*p2
, *p8
, *p9
, *xo
;
728 unsigned long p9_rate
;
731 xo
= clk_hw_get_parent_by_index(hw
, 0);
732 if (req
->rate
== clk_hw_get_rate(xo
)) {
733 req
->best_parent_hw
= xo
;
737 p9
= clk_hw_get_parent_by_index(hw
, 2);
738 p2
= clk_hw_get_parent_by_index(hw
, 3);
739 p8
= clk_hw_get_parent_by_index(hw
, 4);
741 /* PLL9 is a fixed rate PLL */
742 p9_rate
= clk_hw_get_rate(p9
);
744 parent_req
.rate
= req
->rate
= min(req
->rate
, p9_rate
);
745 if (req
->rate
== p9_rate
) {
746 req
->rate
= req
->best_parent_rate
= p9_rate
;
747 req
->best_parent_hw
= p9
;
751 if (req
->best_parent_hw
== p9
) {
752 /* Are we going back to a previously used rate? */
753 if (clk_hw_get_rate(p8
) == req
->rate
)
754 req
->best_parent_hw
= p8
;
756 req
->best_parent_hw
= p2
;
757 } else if (req
->best_parent_hw
== p8
) {
758 req
->best_parent_hw
= p2
;
760 req
->best_parent_hw
= p8
;
763 ret
= __clk_determine_rate(req
->best_parent_hw
, &parent_req
);
767 req
->rate
= req
->best_parent_rate
= parent_req
.rate
;
772 static int clk_gfx3d_set_rate_and_parent(struct clk_hw
*hw
, unsigned long rate
,
773 unsigned long parent_rate
, u8 index
)
775 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
779 /* Just mux it, we don't use the division or m/n hardware */
780 cfg
= rcg
->parent_map
[index
].cfg
<< CFG_SRC_SEL_SHIFT
;
781 ret
= regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, cfg
);
785 return update_config(rcg
);
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
799 const struct clk_ops clk_gfx3d_ops
= {
800 .is_enabled
= clk_rcg2_is_enabled
,
801 .get_parent
= clk_rcg2_get_parent
,
802 .set_parent
= clk_rcg2_set_parent
,
803 .recalc_rate
= clk_rcg2_recalc_rate
,
804 .set_rate
= clk_gfx3d_set_rate
,
805 .set_rate_and_parent
= clk_gfx3d_set_rate_and_parent
,
806 .determine_rate
= clk_gfx3d_determine_rate
,
808 EXPORT_SYMBOL_GPL(clk_gfx3d_ops
);
810 static int clk_rcg2_set_force_enable(struct clk_hw
*hw
)
812 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
813 const char *name
= clk_hw_get_name(hw
);
816 ret
= regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
817 CMD_ROOT_EN
, CMD_ROOT_EN
);
821 /* wait for RCG to turn ON */
822 for (count
= 500; count
> 0; count
--) {
823 if (clk_rcg2_is_enabled(hw
))
829 pr_err("%s: RCG did not turn on\n", name
);
833 static int clk_rcg2_clear_force_enable(struct clk_hw
*hw
)
835 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
837 return regmap_update_bits(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CMD_REG
,
/* Reconfigure a shared RCG while it is force-enabled, then release it. */
static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}
858 static int clk_rcg2_shared_set_rate(struct clk_hw
*hw
, unsigned long rate
,
859 unsigned long parent_rate
)
861 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
862 const struct freq_tbl
*f
;
864 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
869 * In case clock is disabled, update the CFG, M, N and D registers
870 * and don't hit the update bit of CMD register.
872 if (!__clk_is_enabled(hw
->clk
))
873 return __clk_rcg2_configure(rcg
, f
);
875 return clk_rcg2_shared_force_enable_clear(hw
, f
);
878 static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw
*hw
,
879 unsigned long rate
, unsigned long parent_rate
, u8 index
)
881 return clk_rcg2_shared_set_rate(hw
, rate
, parent_rate
);
static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	/*
	 * Set the update bit because required configuration has already
	 * been written in clk_rcg2_shared_set_rate()
	 */
	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = update_config(rcg);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}
904 static void clk_rcg2_shared_disable(struct clk_hw
*hw
)
906 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
910 * Store current configuration as switching to safe source would clear
911 * the SRC and DIV of CFG register
913 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, &cfg
);
916 * Park the RCG at a safe configuration - sourced off of safe source.
917 * Force enable and disable the RCG while configuring it to safeguard
918 * against any update signal coming from the downstream clock.
919 * The current parent is still prepared and enabled at this point, and
920 * the safe source is always on while application processor subsystem
921 * is online. Therefore, the RCG can safely switch its parent.
923 clk_rcg2_set_force_enable(hw
);
925 regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
,
926 rcg
->safe_src_index
<< CFG_SRC_SEL_SHIFT
);
930 clk_rcg2_clear_force_enable(hw
);
932 /* Write back the stored configuration corresponding to current rate */
933 regmap_write(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ CFG_REG
, cfg
);
936 const struct clk_ops clk_rcg2_shared_ops
= {
937 .enable
= clk_rcg2_shared_enable
,
938 .disable
= clk_rcg2_shared_disable
,
939 .get_parent
= clk_rcg2_get_parent
,
940 .set_parent
= clk_rcg2_set_parent
,
941 .recalc_rate
= clk_rcg2_recalc_rate
,
942 .determine_rate
= clk_rcg2_determine_rate
,
943 .set_rate
= clk_rcg2_shared_set_rate
,
944 .set_rate_and_parent
= clk_rcg2_shared_set_rate_and_parent
,
946 EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops
);
948 /* Common APIs to be used for DFS based RCGR */
949 static void clk_rcg2_dfs_populate_freq(struct clk_hw
*hw
, unsigned int l
,
952 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
954 unsigned long prate
= 0;
955 u32 val
, mask
, cfg
, mode
;
958 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_DFSR(l
), &cfg
);
960 mask
= BIT(rcg
->hid_width
) - 1;
963 f
->pre_div
= cfg
& mask
;
965 cfg
&= CFG_SRC_SEL_MASK
;
966 cfg
>>= CFG_SRC_SEL_SHIFT
;
968 num_parents
= clk_hw_get_num_parents(hw
);
969 for (i
= 0; i
< num_parents
; i
++) {
970 if (cfg
== rcg
->parent_map
[i
].cfg
) {
971 f
->src
= rcg
->parent_map
[i
].src
;
972 p
= clk_hw_get_parent_by_index(&rcg
->clkr
.hw
, i
);
973 prate
= clk_hw_get_rate(p
);
977 mode
= cfg
& CFG_MODE_MASK
;
978 mode
>>= CFG_MODE_SHIFT
;
980 mask
= BIT(rcg
->mnd_width
) - 1;
981 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_M_DFSR(l
),
986 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_N_DFSR(l
),
994 f
->freq
= calc_rate(prate
, f
->m
, f
->n
, mode
, f
->pre_div
);
997 static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2
*rcg
)
999 struct freq_tbl
*freq_tbl
;
1002 /* Allocate space for 1 extra since table is NULL terminated */
1003 freq_tbl
= kcalloc(MAX_PERF_LEVEL
+ 1, sizeof(*freq_tbl
), GFP_KERNEL
);
1006 rcg
->freq_tbl
= freq_tbl
;
1008 for (i
= 0; i
< MAX_PERF_LEVEL
; i
++)
1009 clk_rcg2_dfs_populate_freq(&rcg
->clkr
.hw
, i
, freq_tbl
+ i
);
1014 static int clk_rcg2_dfs_determine_rate(struct clk_hw
*hw
,
1015 struct clk_rate_request
*req
)
1017 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1020 if (!rcg
->freq_tbl
) {
1021 ret
= clk_rcg2_dfs_populate_freq_table(rcg
);
1023 pr_err("Failed to update DFS tables for %s\n",
1024 clk_hw_get_name(hw
));
1029 return clk_rcg2_determine_rate(hw
, req
);
1032 static unsigned long
1033 clk_rcg2_dfs_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
1035 struct clk_rcg2
*rcg
= to_clk_rcg2(hw
);
1036 u32 level
, mask
, cfg
, m
= 0, n
= 0, mode
, pre_div
;
1038 regmap_read(rcg
->clkr
.regmap
,
1039 rcg
->cmd_rcgr
+ SE_CMD_DFSR_OFFSET
, &level
);
1040 level
&= GENMASK(4, 1);
1044 return rcg
->freq_tbl
[level
].freq
;
1047 * Assume that parent_rate is actually the parent because
1048 * we can't do any better at figuring it out when the table
1049 * hasn't been populated yet. We only populate the table
1050 * in determine_rate because we can't guarantee the parents
1051 * will be registered with the framework until then.
1053 regmap_read(rcg
->clkr
.regmap
, rcg
->cmd_rcgr
+ SE_PERF_DFSR(level
),
1056 mask
= BIT(rcg
->hid_width
) - 1;
1059 pre_div
= cfg
& mask
;
1061 mode
= cfg
& CFG_MODE_MASK
;
1062 mode
>>= CFG_MODE_SHIFT
;
1064 mask
= BIT(rcg
->mnd_width
) - 1;
1065 regmap_read(rcg
->clkr
.regmap
,
1066 rcg
->cmd_rcgr
+ SE_PERF_M_DFSR(level
), &m
);
1069 regmap_read(rcg
->clkr
.regmap
,
1070 rcg
->cmd_rcgr
+ SE_PERF_N_DFSR(level
), &n
);
1076 return calc_rate(parent_rate
, m
, n
, mode
, pre_div
);
1079 static const struct clk_ops clk_rcg2_dfs_ops
= {
1080 .is_enabled
= clk_rcg2_is_enabled
,
1081 .get_parent
= clk_rcg2_get_parent
,
1082 .determine_rate
= clk_rcg2_dfs_determine_rate
,
1083 .recalc_rate
= clk_rcg2_dfs_recalc_rate
,
1086 static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data
*data
,
1087 struct regmap
*regmap
)
1089 struct clk_rcg2
*rcg
= data
->rcg
;
1090 struct clk_init_data
*init
= data
->init
;
1094 ret
= regmap_read(regmap
, rcg
->cmd_rcgr
+ SE_CMD_DFSR_OFFSET
, &val
);
1098 if (!(val
& SE_CMD_DFS_EN
))
1102 * Rate changes with consumer writing a register in
1103 * their own I/O region
1105 init
->flags
|= CLK_GET_RATE_NOCACHE
;
1106 init
->ops
= &clk_rcg2_dfs_ops
;
1108 rcg
->freq_tbl
= NULL
;
1113 int qcom_cc_register_rcg_dfs(struct regmap
*regmap
,
1114 const struct clk_rcg_dfs_data
*rcgs
, size_t len
)
1118 for (i
= 0; i
< len
; i
++) {
1119 ret
= clk_rcg2_enable_dfs(&rcgs
[i
], regmap
);
1126 EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs
);