1 // SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 */
6 #include <linux/kernel.h>
7 #include <linux/bitops.h>
9 #include <linux/export.h>
10 #include <linux/clk-provider.h>
11 #include <linux/regmap.h>
13 #include <asm/div64.h>
18 static u32
ns_to_src(struct src_sel
*s
, u32 ns
)
20 ns
>>= s
->src_sel_shift
;
25 static u32
src_to_ns(struct src_sel
*s
, u8 src
, u32 ns
)
30 mask
<<= s
->src_sel_shift
;
33 ns
|= src
<< s
->src_sel_shift
;
37 static u8
clk_rcg_get_parent(struct clk_hw
*hw
)
39 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
40 int num_parents
= clk_hw_get_num_parents(hw
);
44 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
47 ns
= ns_to_src(&rcg
->s
, ns
);
48 for (i
= 0; i
< num_parents
; i
++)
49 if (ns
== rcg
->s
.parent_map
[i
].cfg
)
53 pr_debug("%s: Clock %s has invalid parent, using default.\n",
54 __func__
, clk_hw_get_name(hw
));
58 static int reg_to_bank(struct clk_dyn_rcg
*rcg
, u32 bank
)
60 bank
&= BIT(rcg
->mux_sel_bit
);
64 static u8
clk_dyn_rcg_get_parent(struct clk_hw
*hw
)
66 struct clk_dyn_rcg
*rcg
= to_clk_dyn_rcg(hw
);
67 int num_parents
= clk_hw_get_num_parents(hw
);
73 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->bank_reg
, ®
);
76 bank
= reg_to_bank(rcg
, reg
);
79 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
[bank
], &ns
);
82 ns
= ns_to_src(s
, ns
);
84 for (i
= 0; i
< num_parents
; i
++)
85 if (ns
== s
->parent_map
[i
].cfg
)
89 pr_debug("%s: Clock %s has invalid parent, using default.\n",
90 __func__
, clk_hw_get_name(hw
));
94 static int clk_rcg_set_parent(struct clk_hw
*hw
, u8 index
)
96 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
99 regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
100 ns
= src_to_ns(&rcg
->s
, rcg
->s
.parent_map
[index
].cfg
, ns
);
101 regmap_write(rcg
->clkr
.regmap
, rcg
->ns_reg
, ns
);
106 static u32
md_to_m(struct mn
*mn
, u32 md
)
108 md
>>= mn
->m_val_shift
;
109 md
&= BIT(mn
->width
) - 1;
113 static u32
ns_to_pre_div(struct pre_div
*p
, u32 ns
)
115 ns
>>= p
->pre_div_shift
;
116 ns
&= BIT(p
->pre_div_width
) - 1;
120 static u32
pre_div_to_ns(struct pre_div
*p
, u8 pre_div
, u32 ns
)
124 mask
= BIT(p
->pre_div_width
) - 1;
125 mask
<<= p
->pre_div_shift
;
128 ns
|= pre_div
<< p
->pre_div_shift
;
132 static u32
mn_to_md(struct mn
*mn
, u32 m
, u32 n
, u32 md
)
136 mask_w
= BIT(mn
->width
) - 1;
137 mask
= (mask_w
<< mn
->m_val_shift
) | mask_w
;
141 m
<<= mn
->m_val_shift
;
149 static u32
ns_m_to_n(struct mn
*mn
, u32 ns
, u32 m
)
151 ns
= ~ns
>> mn
->n_val_shift
;
152 ns
&= BIT(mn
->width
) - 1;
156 static u32
reg_to_mnctr_mode(struct mn
*mn
, u32 val
)
158 val
>>= mn
->mnctr_mode_shift
;
159 val
&= MNCTR_MODE_MASK
;
163 static u32
mn_to_ns(struct mn
*mn
, u32 m
, u32 n
, u32 ns
)
167 mask
= BIT(mn
->width
) - 1;
168 mask
<<= mn
->n_val_shift
;
174 n
&= BIT(mn
->width
) - 1;
175 n
<<= mn
->n_val_shift
;
182 static u32
mn_to_reg(struct mn
*mn
, u32 m
, u32 n
, u32 val
)
186 mask
= MNCTR_MODE_MASK
<< mn
->mnctr_mode_shift
;
187 mask
|= BIT(mn
->mnctr_en_bit
);
191 val
|= BIT(mn
->mnctr_en_bit
);
192 val
|= MNCTR_MODE_DUAL
<< mn
->mnctr_mode_shift
;
198 static int configure_bank(struct clk_dyn_rcg
*rcg
, const struct freq_tbl
*f
)
201 int bank
, new_bank
, ret
, index
;
207 bool banked_mn
= !!rcg
->mn
[1].width
;
208 bool banked_p
= !!rcg
->p
[1].pre_div_width
;
209 struct clk_hw
*hw
= &rcg
->clkr
.hw
;
211 enabled
= __clk_is_enabled(hw
->clk
);
213 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->bank_reg
, ®
);
216 bank
= reg_to_bank(rcg
, reg
);
217 new_bank
= enabled
? !bank
: bank
;
219 ns_reg
= rcg
->ns_reg
[new_bank
];
220 ret
= regmap_read(rcg
->clkr
.regmap
, ns_reg
, &ns
);
225 mn
= &rcg
->mn
[new_bank
];
226 md_reg
= rcg
->md_reg
[new_bank
];
228 ns
|= BIT(mn
->mnctr_reset_bit
);
229 ret
= regmap_write(rcg
->clkr
.regmap
, ns_reg
, ns
);
233 ret
= regmap_read(rcg
->clkr
.regmap
, md_reg
, &md
);
236 md
= mn_to_md(mn
, f
->m
, f
->n
, md
);
237 ret
= regmap_write(rcg
->clkr
.regmap
, md_reg
, md
);
240 ns
= mn_to_ns(mn
, f
->m
, f
->n
, ns
);
241 ret
= regmap_write(rcg
->clkr
.regmap
, ns_reg
, ns
);
245 /* Two NS registers means mode control is in NS register */
246 if (rcg
->ns_reg
[0] != rcg
->ns_reg
[1]) {
247 ns
= mn_to_reg(mn
, f
->m
, f
->n
, ns
);
248 ret
= regmap_write(rcg
->clkr
.regmap
, ns_reg
, ns
);
252 reg
= mn_to_reg(mn
, f
->m
, f
->n
, reg
);
253 ret
= regmap_write(rcg
->clkr
.regmap
, rcg
->bank_reg
,
259 ns
&= ~BIT(mn
->mnctr_reset_bit
);
260 ret
= regmap_write(rcg
->clkr
.regmap
, ns_reg
, ns
);
266 p
= &rcg
->p
[new_bank
];
267 ns
= pre_div_to_ns(p
, f
->pre_div
- 1, ns
);
270 s
= &rcg
->s
[new_bank
];
271 index
= qcom_find_src_index(hw
, s
->parent_map
, f
->src
);
274 ns
= src_to_ns(s
, s
->parent_map
[index
].cfg
, ns
);
275 ret
= regmap_write(rcg
->clkr
.regmap
, ns_reg
, ns
);
280 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->bank_reg
, ®
);
283 reg
^= BIT(rcg
->mux_sel_bit
);
284 ret
= regmap_write(rcg
->clkr
.regmap
, rcg
->bank_reg
, reg
);
291 static int clk_dyn_rcg_set_parent(struct clk_hw
*hw
, u8 index
)
293 struct clk_dyn_rcg
*rcg
= to_clk_dyn_rcg(hw
);
296 struct freq_tbl f
= { 0 };
297 bool banked_mn
= !!rcg
->mn
[1].width
;
298 bool banked_p
= !!rcg
->p
[1].pre_div_width
;
300 regmap_read(rcg
->clkr
.regmap
, rcg
->bank_reg
, ®
);
301 bank
= reg_to_bank(rcg
, reg
);
303 regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
[bank
], &ns
);
306 regmap_read(rcg
->clkr
.regmap
, rcg
->md_reg
[bank
], &md
);
307 f
.m
= md_to_m(&rcg
->mn
[bank
], md
);
308 f
.n
= ns_m_to_n(&rcg
->mn
[bank
], ns
, f
.m
);
312 f
.pre_div
= ns_to_pre_div(&rcg
->p
[bank
], ns
) + 1;
314 f
.src
= qcom_find_src_index(hw
, rcg
->s
[bank
].parent_map
, index
);
315 return configure_bank(rcg
, &f
);
319 * Calculate m/n:d rate
322 * rate = ----------- x ---
326 calc_rate(unsigned long rate
, u32 m
, u32 n
, u32 mode
, u32 pre_div
)
342 clk_rcg_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
344 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
345 u32 pre_div
, m
= 0, n
= 0, ns
, md
, mode
= 0;
346 struct mn
*mn
= &rcg
->mn
;
348 regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
349 pre_div
= ns_to_pre_div(&rcg
->p
, ns
);
352 regmap_read(rcg
->clkr
.regmap
, rcg
->md_reg
, &md
);
354 n
= ns_m_to_n(mn
, ns
, m
);
355 /* MN counter mode is in hw.enable_reg sometimes */
356 if (rcg
->clkr
.enable_reg
!= rcg
->ns_reg
)
357 regmap_read(rcg
->clkr
.regmap
, rcg
->clkr
.enable_reg
, &mode
);
360 mode
= reg_to_mnctr_mode(mn
, mode
);
363 return calc_rate(parent_rate
, m
, n
, mode
, pre_div
);
367 clk_dyn_rcg_recalc_rate(struct clk_hw
*hw
, unsigned long parent_rate
)
369 struct clk_dyn_rcg
*rcg
= to_clk_dyn_rcg(hw
);
370 u32 m
, n
, pre_div
, ns
, md
, mode
, reg
;
373 bool banked_p
= !!rcg
->p
[1].pre_div_width
;
374 bool banked_mn
= !!rcg
->mn
[1].width
;
376 regmap_read(rcg
->clkr
.regmap
, rcg
->bank_reg
, ®
);
377 bank
= reg_to_bank(rcg
, reg
);
379 regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
[bank
], &ns
);
380 m
= n
= pre_div
= mode
= 0;
384 regmap_read(rcg
->clkr
.regmap
, rcg
->md_reg
[bank
], &md
);
386 n
= ns_m_to_n(mn
, ns
, m
);
387 /* Two NS registers means mode control is in NS register */
388 if (rcg
->ns_reg
[0] != rcg
->ns_reg
[1])
390 mode
= reg_to_mnctr_mode(mn
, reg
);
394 pre_div
= ns_to_pre_div(&rcg
->p
[bank
], ns
);
396 return calc_rate(parent_rate
, m
, n
, mode
, pre_div
);
399 static int _freq_tbl_determine_rate(struct clk_hw
*hw
, const struct freq_tbl
*f
,
400 struct clk_rate_request
*req
,
401 const struct parent_map
*parent_map
)
403 unsigned long clk_flags
, rate
= req
->rate
;
407 f
= qcom_find_freq(f
, rate
);
411 index
= qcom_find_src_index(hw
, parent_map
, f
->src
);
415 clk_flags
= clk_hw_get_flags(hw
);
416 p
= clk_hw_get_parent_by_index(hw
, index
);
417 if (clk_flags
& CLK_SET_RATE_PARENT
) {
418 rate
= rate
* f
->pre_div
;
426 rate
= clk_hw_get_rate(p
);
428 req
->best_parent_hw
= p
;
429 req
->best_parent_rate
= rate
;
435 static int clk_rcg_determine_rate(struct clk_hw
*hw
,
436 struct clk_rate_request
*req
)
438 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
440 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
,
444 static int clk_dyn_rcg_determine_rate(struct clk_hw
*hw
,
445 struct clk_rate_request
*req
)
447 struct clk_dyn_rcg
*rcg
= to_clk_dyn_rcg(hw
);
452 regmap_read(rcg
->clkr
.regmap
, rcg
->bank_reg
, ®
);
453 bank
= reg_to_bank(rcg
, reg
);
456 return _freq_tbl_determine_rate(hw
, rcg
->freq_tbl
, req
, s
->parent_map
);
459 static int clk_rcg_bypass_determine_rate(struct clk_hw
*hw
,
460 struct clk_rate_request
*req
)
462 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
463 const struct freq_tbl
*f
= rcg
->freq_tbl
;
465 int index
= qcom_find_src_index(hw
, rcg
->s
.parent_map
, f
->src
);
467 req
->best_parent_hw
= p
= clk_hw_get_parent_by_index(hw
, index
);
468 req
->best_parent_rate
= clk_hw_round_rate(p
, req
->rate
);
469 req
->rate
= req
->best_parent_rate
;
474 static int __clk_rcg_set_rate(struct clk_rcg
*rcg
, const struct freq_tbl
*f
)
477 struct mn
*mn
= &rcg
->mn
;
479 unsigned int reset_reg
;
481 if (rcg
->mn
.reset_in_cc
)
482 reset_reg
= rcg
->clkr
.enable_reg
;
484 reset_reg
= rcg
->ns_reg
;
487 mask
= BIT(mn
->mnctr_reset_bit
);
488 regmap_update_bits(rcg
->clkr
.regmap
, reset_reg
, mask
, mask
);
490 regmap_read(rcg
->clkr
.regmap
, rcg
->md_reg
, &md
);
491 md
= mn_to_md(mn
, f
->m
, f
->n
, md
);
492 regmap_write(rcg
->clkr
.regmap
, rcg
->md_reg
, md
);
494 regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
495 /* MN counter mode is in hw.enable_reg sometimes */
496 if (rcg
->clkr
.enable_reg
!= rcg
->ns_reg
) {
497 regmap_read(rcg
->clkr
.regmap
, rcg
->clkr
.enable_reg
, &ctl
);
498 ctl
= mn_to_reg(mn
, f
->m
, f
->n
, ctl
);
499 regmap_write(rcg
->clkr
.regmap
, rcg
->clkr
.enable_reg
, ctl
);
501 ns
= mn_to_reg(mn
, f
->m
, f
->n
, ns
);
503 ns
= mn_to_ns(mn
, f
->m
, f
->n
, ns
);
505 regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
508 ns
= pre_div_to_ns(&rcg
->p
, f
->pre_div
- 1, ns
);
509 regmap_write(rcg
->clkr
.regmap
, rcg
->ns_reg
, ns
);
511 regmap_update_bits(rcg
->clkr
.regmap
, reset_reg
, mask
, 0);
516 static int clk_rcg_set_rate(struct clk_hw
*hw
, unsigned long rate
,
517 unsigned long parent_rate
)
519 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
520 const struct freq_tbl
*f
;
522 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
526 return __clk_rcg_set_rate(rcg
, f
);
529 static int clk_rcg_bypass_set_rate(struct clk_hw
*hw
, unsigned long rate
,
530 unsigned long parent_rate
)
532 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
534 return __clk_rcg_set_rate(rcg
, rcg
->freq_tbl
);
537 static int clk_rcg_bypass2_determine_rate(struct clk_hw
*hw
,
538 struct clk_rate_request
*req
)
542 p
= req
->best_parent_hw
;
543 req
->best_parent_rate
= clk_hw_round_rate(p
, req
->rate
);
544 req
->rate
= req
->best_parent_rate
;
549 static int clk_rcg_bypass2_set_rate(struct clk_hw
*hw
, unsigned long rate
,
550 unsigned long parent_rate
)
552 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
553 struct freq_tbl f
= { 0 };
555 int i
, ret
, num_parents
= clk_hw_get_num_parents(hw
);
557 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
561 src
= ns_to_src(&rcg
->s
, ns
);
562 f
.pre_div
= ns_to_pre_div(&rcg
->p
, ns
) + 1;
564 for (i
= 0; i
< num_parents
; i
++) {
565 if (src
== rcg
->s
.parent_map
[i
].cfg
) {
566 f
.src
= rcg
->s
.parent_map
[i
].src
;
567 return __clk_rcg_set_rate(rcg
, &f
);
574 static int clk_rcg_bypass2_set_rate_and_parent(struct clk_hw
*hw
,
575 unsigned long rate
, unsigned long parent_rate
, u8 index
)
577 /* Read the hardware to determine parent during set_rate */
578 return clk_rcg_bypass2_set_rate(hw
, rate
, parent_rate
);
586 static const struct frac_entry pixel_table
[] = {
593 static int clk_rcg_pixel_determine_rate(struct clk_hw
*hw
,
594 struct clk_rate_request
*req
)
597 const struct frac_entry
*frac
= pixel_table
;
598 unsigned long request
, src_rate
;
600 for (; frac
->num
; frac
++) {
601 request
= (req
->rate
* frac
->den
) / frac
->num
;
603 src_rate
= clk_hw_round_rate(req
->best_parent_hw
, request
);
605 if ((src_rate
< (request
- delta
)) ||
606 (src_rate
> (request
+ delta
)))
609 req
->best_parent_rate
= src_rate
;
610 req
->rate
= (src_rate
* frac
->num
) / frac
->den
;
617 static int clk_rcg_pixel_set_rate(struct clk_hw
*hw
, unsigned long rate
,
618 unsigned long parent_rate
)
620 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
622 const struct frac_entry
*frac
= pixel_table
;
623 unsigned long request
;
624 struct freq_tbl f
= { 0 };
626 int i
, ret
, num_parents
= clk_hw_get_num_parents(hw
);
628 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
632 src
= ns_to_src(&rcg
->s
, ns
);
634 for (i
= 0; i
< num_parents
; i
++) {
635 if (src
== rcg
->s
.parent_map
[i
].cfg
) {
636 f
.src
= rcg
->s
.parent_map
[i
].src
;
641 /* bypass the pre divider */
644 /* let us find appropriate m/n values for this */
645 for (; frac
->num
; frac
++) {
646 request
= (rate
* frac
->den
) / frac
->num
;
648 if ((parent_rate
< (request
- delta
)) ||
649 (parent_rate
> (request
+ delta
)))
655 return __clk_rcg_set_rate(rcg
, &f
);
661 static int clk_rcg_pixel_set_rate_and_parent(struct clk_hw
*hw
,
662 unsigned long rate
, unsigned long parent_rate
, u8 index
)
664 return clk_rcg_pixel_set_rate(hw
, rate
, parent_rate
);
667 static int clk_rcg_esc_determine_rate(struct clk_hw
*hw
,
668 struct clk_rate_request
*req
)
670 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
671 int pre_div_max
= BIT(rcg
->p
.pre_div_width
);
673 unsigned long src_rate
;
678 src_rate
= clk_hw_get_rate(req
->best_parent_hw
);
680 div
= src_rate
/ req
->rate
;
682 if (div
>= 1 && div
<= pre_div_max
) {
683 req
->best_parent_rate
= src_rate
;
684 req
->rate
= src_rate
/ div
;
691 static int clk_rcg_esc_set_rate(struct clk_hw
*hw
, unsigned long rate
,
692 unsigned long parent_rate
)
694 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
695 struct freq_tbl f
= { 0 };
696 int pre_div_max
= BIT(rcg
->p
.pre_div_width
);
699 int i
, ret
, num_parents
= clk_hw_get_num_parents(hw
);
704 ret
= regmap_read(rcg
->clkr
.regmap
, rcg
->ns_reg
, &ns
);
708 ns
= ns_to_src(&rcg
->s
, ns
);
710 for (i
= 0; i
< num_parents
; i
++) {
711 if (ns
== rcg
->s
.parent_map
[i
].cfg
) {
712 f
.src
= rcg
->s
.parent_map
[i
].src
;
717 div
= parent_rate
/ rate
;
719 if (div
>= 1 && div
<= pre_div_max
) {
721 return __clk_rcg_set_rate(rcg
, &f
);
727 static int clk_rcg_esc_set_rate_and_parent(struct clk_hw
*hw
,
728 unsigned long rate
, unsigned long parent_rate
, u8 index
)
730 return clk_rcg_esc_set_rate(hw
, rate
, parent_rate
);
734 * This type of clock has a glitch-free mux that switches between the output of
735 * the M/N counter and an always on clock source (XO). When clk_set_rate() is
736 * called we need to make sure that we don't switch to the M/N counter if it
737 * isn't clocking because the mux will get stuck and the clock will stop
738 * outputting a clock. This can happen if the framework isn't aware that this
739 * clock is on and so clk_set_rate() doesn't turn on the new parent. To fix
740 * this we switch the mux in the enable/disable ops and reprogram the M/N
741 * counter in the set_rate op. We also make sure to switch away from the M/N
742 * counter in set_rate if software thinks the clock is off.
744 static int clk_rcg_lcc_set_rate(struct clk_hw
*hw
, unsigned long rate
,
745 unsigned long parent_rate
)
747 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
748 const struct freq_tbl
*f
;
752 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
756 /* Switch to XO to avoid glitches */
757 regmap_update_bits(rcg
->clkr
.regmap
, rcg
->ns_reg
, gfm
, 0);
758 ret
= __clk_rcg_set_rate(rcg
, f
);
759 /* Switch back to M/N if it's clocking */
760 if (__clk_is_enabled(hw
->clk
))
761 regmap_update_bits(rcg
->clkr
.regmap
, rcg
->ns_reg
, gfm
, gfm
);
766 static int clk_rcg_lcc_enable(struct clk_hw
*hw
)
768 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
772 return regmap_update_bits(rcg
->clkr
.regmap
, rcg
->ns_reg
, gfm
, gfm
);
775 static void clk_rcg_lcc_disable(struct clk_hw
*hw
)
777 struct clk_rcg
*rcg
= to_clk_rcg(hw
);
781 regmap_update_bits(rcg
->clkr
.regmap
, rcg
->ns_reg
, gfm
, 0);
784 static int __clk_dyn_rcg_set_rate(struct clk_hw
*hw
, unsigned long rate
)
786 struct clk_dyn_rcg
*rcg
= to_clk_dyn_rcg(hw
);
787 const struct freq_tbl
*f
;
789 f
= qcom_find_freq(rcg
->freq_tbl
, rate
);
793 return configure_bank(rcg
, f
);
static int clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}
802 static int clk_dyn_rcg_set_rate_and_parent(struct clk_hw
*hw
,
803 unsigned long rate
, unsigned long parent_rate
, u8 index
)
805 return __clk_dyn_rcg_set_rate(hw
, rate
);
808 const struct clk_ops clk_rcg_ops
= {
809 .enable
= clk_enable_regmap
,
810 .disable
= clk_disable_regmap
,
811 .get_parent
= clk_rcg_get_parent
,
812 .set_parent
= clk_rcg_set_parent
,
813 .recalc_rate
= clk_rcg_recalc_rate
,
814 .determine_rate
= clk_rcg_determine_rate
,
815 .set_rate
= clk_rcg_set_rate
,
817 EXPORT_SYMBOL_GPL(clk_rcg_ops
);
819 const struct clk_ops clk_rcg_bypass_ops
= {
820 .enable
= clk_enable_regmap
,
821 .disable
= clk_disable_regmap
,
822 .get_parent
= clk_rcg_get_parent
,
823 .set_parent
= clk_rcg_set_parent
,
824 .recalc_rate
= clk_rcg_recalc_rate
,
825 .determine_rate
= clk_rcg_bypass_determine_rate
,
826 .set_rate
= clk_rcg_bypass_set_rate
,
828 EXPORT_SYMBOL_GPL(clk_rcg_bypass_ops
);
830 const struct clk_ops clk_rcg_bypass2_ops
= {
831 .enable
= clk_enable_regmap
,
832 .disable
= clk_disable_regmap
,
833 .get_parent
= clk_rcg_get_parent
,
834 .set_parent
= clk_rcg_set_parent
,
835 .recalc_rate
= clk_rcg_recalc_rate
,
836 .determine_rate
= clk_rcg_bypass2_determine_rate
,
837 .set_rate
= clk_rcg_bypass2_set_rate
,
838 .set_rate_and_parent
= clk_rcg_bypass2_set_rate_and_parent
,
840 EXPORT_SYMBOL_GPL(clk_rcg_bypass2_ops
);
842 const struct clk_ops clk_rcg_pixel_ops
= {
843 .enable
= clk_enable_regmap
,
844 .disable
= clk_disable_regmap
,
845 .get_parent
= clk_rcg_get_parent
,
846 .set_parent
= clk_rcg_set_parent
,
847 .recalc_rate
= clk_rcg_recalc_rate
,
848 .determine_rate
= clk_rcg_pixel_determine_rate
,
849 .set_rate
= clk_rcg_pixel_set_rate
,
850 .set_rate_and_parent
= clk_rcg_pixel_set_rate_and_parent
,
852 EXPORT_SYMBOL_GPL(clk_rcg_pixel_ops
);
854 const struct clk_ops clk_rcg_esc_ops
= {
855 .enable
= clk_enable_regmap
,
856 .disable
= clk_disable_regmap
,
857 .get_parent
= clk_rcg_get_parent
,
858 .set_parent
= clk_rcg_set_parent
,
859 .recalc_rate
= clk_rcg_recalc_rate
,
860 .determine_rate
= clk_rcg_esc_determine_rate
,
861 .set_rate
= clk_rcg_esc_set_rate
,
862 .set_rate_and_parent
= clk_rcg_esc_set_rate_and_parent
,
864 EXPORT_SYMBOL_GPL(clk_rcg_esc_ops
);
866 const struct clk_ops clk_rcg_lcc_ops
= {
867 .enable
= clk_rcg_lcc_enable
,
868 .disable
= clk_rcg_lcc_disable
,
869 .get_parent
= clk_rcg_get_parent
,
870 .set_parent
= clk_rcg_set_parent
,
871 .recalc_rate
= clk_rcg_recalc_rate
,
872 .determine_rate
= clk_rcg_determine_rate
,
873 .set_rate
= clk_rcg_lcc_set_rate
,
875 EXPORT_SYMBOL_GPL(clk_rcg_lcc_ops
);
877 const struct clk_ops clk_dyn_rcg_ops
= {
878 .enable
= clk_enable_regmap
,
879 .is_enabled
= clk_is_enabled_regmap
,
880 .disable
= clk_disable_regmap
,
881 .get_parent
= clk_dyn_rcg_get_parent
,
882 .set_parent
= clk_dyn_rcg_set_parent
,
883 .recalc_rate
= clk_dyn_rcg_recalc_rate
,
884 .determine_rate
= clk_dyn_rcg_determine_rate
,
885 .set_rate
= clk_dyn_rcg_set_rate
,
886 .set_rate_and_parent
= clk_dyn_rcg_set_rate_and_parent
,
888 EXPORT_SYMBOL_GPL(clk_dyn_rcg_ops
);