// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/regmap.h>
#include <linux/math64.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)
#define CFG_HW_CLK_CTRL_MASK	BIT(20)

#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10
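
/*
 * Register layout implied by the offsets and masks above (per RCG, relative
 * to rcg->cmd_rcgr): CMD at +0x0 carries the UPDATE/ROOT_EN/DIRTY/ROOT_OFF
 * control and status bits; CFG at +0x4 holds the source select (bits 10:8),
 * the half-integer divider (the low hid_width bits), the MND mode (bits
 * 13:12) and the hardware clock control bit (20); the M, N and D counters
 * live at +0x8, +0xc and +0x10.
 */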

static int clk_rcg2_is_enabled(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cmd;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
	if (ret)
		return ret;

	return (cmd & CMD_ROOT_OFF) == 0;
}

static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 cfg;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	if (ret)
		goto err;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

	/* Wait for update to take effect */
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return 0;
}

static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 */
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;

		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}
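
/*
 * Worked example of the formula above: with an 810 MHz parent, a CFG divider
 * field of 1 (i.e. divide by (1 + 1) / 2 = 1) and m/n = 11/60 in dual-edge
 * mode, calc_rate() returns 810 MHz * 11 / 60 = 148.5 MHz, matching the
 * 148.50 M entry of frac_table_810m further below.
 */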

static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}

enum freq_policy {
	FLOOR,
	CEIL,
};

static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
				    struct clk_rate_request *req,
				    enum freq_policy policy)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(f, rate);
		break;
	case CEIL:
		f = qcom_find_freq(f, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		rate = f->freq;
		if (f->pre_div) {
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			u64 tmp = rate;

			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}

static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}

static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + N_REG, mask,
					 ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				  mask, cfg);
}
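
/*
 * Example of the M/N/D encoding performed above: for the 11/60 fraction used
 * by the eDP pixel tables below, M is programmed with 11, N with the inverse
 * of (n - m), i.e. ~(60 - 11) = ~49, D with ~60, and CFG_MODE_DUAL_EDGE is
 * set in CFG because m != n.
 */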

static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	int ret;

	ret = __clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			       enum freq_policy policy)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(rcg->freq_tbl, rate);
		break;
	case CEIL:
		f = qcom_find_freq(rcg->freq_tbl, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	return clk_rcg2_configure(rcg, f);
}

static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
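
/*
 * Illustrative sketch only (not part of this driver): how a platform clock
 * driver typically wires an RCG2 up to clk_rcg2_ops. Every name, register
 * offset and frequency below is a made-up placeholder, and the F()
 * initializer is the usual helper macro defined by the SoC clock drivers,
 * not by this file:
 *
 *	enum { P_XO, P_GPLL0 };
 *
 *	static const struct parent_map example_parent_map[] = {
 *		{ P_XO, 0 },
 *		{ P_GPLL0, 1 },
 *	};
 *
 *	static const char * const example_parents[] = { "xo", "gpll0" };
 *
 *	static const struct freq_tbl ftbl_example_clk_src[] = {
 *		F(19200000, P_XO, 1, 0, 0),
 *		F(100000000, P_GPLL0, 6, 0, 0),
 *		{ }
 *	};
 *
 *	static struct clk_rcg2 example_clk_src = {
 *		.cmd_rcgr = 0x1000,
 *		.hid_width = 5,
 *		.parent_map = example_parent_map,
 *		.freq_tbl = ftbl_example_clk_src,
 *		.clkr.hw.init = &(struct clk_init_data){
 *			.name = "example_clk_src",
 *			.parent_names = example_parents,
 *			.num_parents = ARRAY_SIZE(example_parents),
 *			.ops = &clk_rcg2_ops,
 *		},
 *	};
 */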

const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);

struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};

static struct frac_entry frac_table_810m[] = {	/* Link rate of 162M */
	{ 31, 211 },	/* 119 M */
	{ 32, 199 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 60 },	/* 148.50 M */
	{ 50, 263 },	/* 154 M */
	{ 31, 120 },	/* 205.25 M */
	{ 119, 359 },	/* 268.50 M */
	{ },
};
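
/*
 * The fractions above express pixel rates as m/n of the fixed link clock.
 * For example, with the 810 MHz parent (162 M link rate) table, the
 * { 11, 60 } entry yields 810 MHz * 11 / 60 = 148.5 MHz, matching its
 * comment.
 */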

static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}

static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}

const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);

static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}
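
/*
 * Worked example of the divider math above: for a requested byte clock of
 * 125 MHz from a 750 MHz parent, div = DIV_ROUND_UP(1500, 125) - 1 = 11,
 * which encodes a divide-by-six ((11 + 1) / 2), so calc_rate() returns
 * 750 MHz * 2 / 12 = 125 MHz.
 */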

static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	unsigned long div;
	u32 mask = BIT(rcg->hid_width) - 1;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);

static int clk_byte2_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;
	unsigned long rate = req->rate;

	if (rate == 0)
		return -EINVAL;

	p = req->best_parent_hw;
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}

static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_byte2_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);

static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};

static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	unsigned long request, src_rate;
	int delta = 100000;
	const struct frac_entry *frac = frac_table_pixel;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);

static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_hw *p2, *p8, *p9, *xo;
	unsigned long p9_rate;
	int ret;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	p9 = clk_hw_get_parent_by_index(hw, 2);
	p2 = clk_hw_get_parent_by_index(hw, 3);
	p8 = clk_hw_get_parent_by_index(hw, 4);

	/* PLL9 is a fixed rate PLL */
	p9_rate = clk_hw_get_rate(p9);

	parent_req.rate = req->rate = min(req->rate, p9_rate);
	if (req->rate == p9_rate) {
		req->rate = req->best_parent_rate = p9_rate;
		req->best_parent_hw = p9;
		return 0;
	}

	if (req->best_parent_hw == p9) {
		/* Are we going back to a previously used rate? */
		if (clk_hw_get_rate(p8) == req->rate)
			req->best_parent_hw = p8;
		else
			req->best_parent_hw = p2;
	} else if (req->best_parent_hw == p8) {
		req->best_parent_hw = p2;
	} else {
		req->best_parent_hw = p8;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;

	return 0;
}

static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;
	int ret;

	/* Just mux it, we don't use the division or m/n hardware */
	cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}

const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);

static int clk_rcg2_set_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const char *name = clk_hw_get_name(hw);
	int ret, count;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_ROOT_EN, CMD_ROOT_EN);
	if (ret)
		return ret;

	/* wait for RCG to turn ON */
	for (count = 500; count > 0; count--) {
		if (clk_rcg2_is_enabled(hw))
			return 0;

		udelay(1);
	}

	pr_err("%s: RCG did not turn on\n", name);
	return -ETIMEDOUT;
}

static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  CMD_ROOT_EN, 0);
}

static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}

static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	/*
	 * In case clock is disabled, update the CFG, M, N and D registers
	 * and don't hit the update bit of CMD register.
	 */
	if (!__clk_is_enabled(hw->clk))
		return __clk_rcg2_configure(rcg, f);

	return clk_rcg2_shared_force_enable_clear(hw, f);
}

static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
}

static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	/*
	 * Set the update bit because required configuration has already
	 * been written in clk_rcg2_shared_set_rate()
	 */
	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = update_config(rcg);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}

static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;

	/*
	 * Store current configuration as switching to safe source would clear
	 * the SRC and DIV of CFG register
	 */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	/*
	 * Park the RCG at a safe configuration - sourced off of safe source.
	 * Force enable and disable the RCG while configuring it to safeguard
	 * against any update signal coming from the downstream clock.
	 * The current parent is still prepared and enabled at this point, and
	 * the safe source is always on while application processor subsystem
	 * is online. Therefore, the RCG can safely switch its parent.
	 */
	clk_rcg2_set_force_enable(hw);

	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
		     rcg->safe_src_index << CFG_SRC_SEL_SHIFT);

	update_config(rcg);

	clk_rcg2_clear_force_enable(hw);

	/* Write back the stored configuration corresponding to current rate */
	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
}

const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);