// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2016 Maxime Ripard
 * Maxime Ripard <maxime.ripard@free-electrons.com>
 */
7 #include <linux/clk-provider.h>
/*
 * Find the (m, p) divider pair giving the rate closest to, but not
 * above, @rate when dividing @parent.
 *
 * @parent:	parent clock rate in Hz
 * @rate:	requested rate in Hz
 * @max_m:	highest usable linear divider
 * @max_p:	highest usable power-of-two divider
 * @m:		out: best linear divider (0 if nothing <= @rate was found)
 * @p:		out: best power-of-two divider (0 if nothing was found)
 */
static void ccu_mp_find_best(unsigned long parent, unsigned long rate,
			     unsigned int max_m, unsigned int max_p,
			     unsigned int *m, unsigned int *p)
{
	unsigned long best_rate = 0;
	unsigned int best_m = 0, best_p = 0;
	unsigned int _m, _p;

	for (_p = 1; _p <= max_p; _p <<= 1) {
		for (_m = 1; _m <= max_m; _m++) {
			unsigned long tmp_rate = parent / _p / _m;

			/*
			 * Skip candidates above the target: the unsigned
			 * subtraction below must not wrap around.
			 */
			if (tmp_rate > rate)
				continue;

			if ((rate - tmp_rate) < (rate - best_rate)) {
				best_rate = tmp_rate;
				best_m = _m;
				best_p = _p;
			}
		}
	}

	*m = best_m;
	*p = best_p;
}
40 static unsigned long ccu_mp_find_best_with_parent_adj(struct clk_hw
*hw
,
41 unsigned long *parent
,
46 unsigned long parent_rate_saved
;
47 unsigned long parent_rate
, now
;
48 unsigned long best_rate
= 0;
49 unsigned int _m
, _p
, div
;
52 parent_rate_saved
= *parent
;
55 * The maximum divider we can use without overflowing
56 * unsigned long in rate * m * p below
58 maxdiv
= max_m
* max_p
;
59 maxdiv
= min(ULONG_MAX
/ rate
, maxdiv
);
61 for (_p
= 1; _p
<= max_p
; _p
<<= 1) {
62 for (_m
= 1; _m
<= max_m
; _m
++) {
68 if (rate
* div
== parent_rate_saved
) {
70 * It's the most ideal case if the requested
71 * rate can be divided from parent clock without
72 * needing to change parent rate, so return the
73 * divider immediately.
75 *parent
= parent_rate_saved
;
79 parent_rate
= clk_hw_round_rate(hw
, rate
* div
);
80 now
= parent_rate
/ div
;
82 if (now
<= rate
&& now
> best_rate
) {
84 *parent
= parent_rate
;
95 static unsigned long ccu_mp_round_rate(struct ccu_mux_internal
*mux
,
97 unsigned long *parent_rate
,
101 struct ccu_mp
*cmp
= data
;
102 unsigned int max_m
, max_p
;
105 if (cmp
->common
.features
& CCU_FEATURE_FIXED_POSTDIV
)
106 rate
*= cmp
->fixed_post_div
;
108 max_m
= cmp
->m
.max
?: 1 << cmp
->m
.width
;
109 max_p
= cmp
->p
.max
?: 1 << ((1 << cmp
->p
.width
) - 1);
111 if (!(clk_hw_get_flags(hw
) & CLK_SET_RATE_PARENT
)) {
112 ccu_mp_find_best(*parent_rate
, rate
, max_m
, max_p
, &m
, &p
);
113 rate
= *parent_rate
/ p
/ m
;
115 rate
= ccu_mp_find_best_with_parent_adj(hw
, parent_rate
, rate
,
119 if (cmp
->common
.features
& CCU_FEATURE_FIXED_POSTDIV
)
120 rate
/= cmp
->fixed_post_div
;
125 static void ccu_mp_disable(struct clk_hw
*hw
)
127 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
129 return ccu_gate_helper_disable(&cmp
->common
, cmp
->enable
);
132 static int ccu_mp_enable(struct clk_hw
*hw
)
134 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
136 return ccu_gate_helper_enable(&cmp
->common
, cmp
->enable
);
139 static int ccu_mp_is_enabled(struct clk_hw
*hw
)
141 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
143 return ccu_gate_helper_is_enabled(&cmp
->common
, cmp
->enable
);
146 static unsigned long ccu_mp_recalc_rate(struct clk_hw
*hw
,
147 unsigned long parent_rate
)
149 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
154 /* Adjust parent_rate according to pre-dividers */
155 parent_rate
= ccu_mux_helper_apply_prediv(&cmp
->common
, &cmp
->mux
, -1,
158 reg
= readl(cmp
->common
.base
+ cmp
->common
.reg
);
160 m
= reg
>> cmp
->m
.shift
;
161 m
&= (1 << cmp
->m
.width
) - 1;
166 p
= reg
>> cmp
->p
.shift
;
167 p
&= (1 << cmp
->p
.width
) - 1;
169 rate
= (parent_rate
>> p
) / m
;
170 if (cmp
->common
.features
& CCU_FEATURE_FIXED_POSTDIV
)
171 rate
/= cmp
->fixed_post_div
;
176 static int ccu_mp_determine_rate(struct clk_hw
*hw
,
177 struct clk_rate_request
*req
)
179 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
181 return ccu_mux_helper_determine_rate(&cmp
->common
, &cmp
->mux
,
182 req
, ccu_mp_round_rate
, cmp
);
185 static int ccu_mp_set_rate(struct clk_hw
*hw
, unsigned long rate
,
186 unsigned long parent_rate
)
188 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
190 unsigned int max_m
, max_p
;
194 /* Adjust parent_rate according to pre-dividers */
195 parent_rate
= ccu_mux_helper_apply_prediv(&cmp
->common
, &cmp
->mux
, -1,
198 max_m
= cmp
->m
.max
?: 1 << cmp
->m
.width
;
199 max_p
= cmp
->p
.max
?: 1 << ((1 << cmp
->p
.width
) - 1);
201 /* Adjust target rate according to post-dividers */
202 if (cmp
->common
.features
& CCU_FEATURE_FIXED_POSTDIV
)
203 rate
= rate
* cmp
->fixed_post_div
;
205 ccu_mp_find_best(parent_rate
, rate
, max_m
, max_p
, &m
, &p
);
207 spin_lock_irqsave(cmp
->common
.lock
, flags
);
209 reg
= readl(cmp
->common
.base
+ cmp
->common
.reg
);
210 reg
&= ~GENMASK(cmp
->m
.width
+ cmp
->m
.shift
- 1, cmp
->m
.shift
);
211 reg
&= ~GENMASK(cmp
->p
.width
+ cmp
->p
.shift
- 1, cmp
->p
.shift
);
212 reg
|= (m
- cmp
->m
.offset
) << cmp
->m
.shift
;
213 reg
|= ilog2(p
) << cmp
->p
.shift
;
215 writel(reg
, cmp
->common
.base
+ cmp
->common
.reg
);
217 spin_unlock_irqrestore(cmp
->common
.lock
, flags
);
222 static u8
ccu_mp_get_parent(struct clk_hw
*hw
)
224 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
226 return ccu_mux_helper_get_parent(&cmp
->common
, &cmp
->mux
);
229 static int ccu_mp_set_parent(struct clk_hw
*hw
, u8 index
)
231 struct ccu_mp
*cmp
= hw_to_ccu_mp(hw
);
233 return ccu_mux_helper_set_parent(&cmp
->common
, &cmp
->mux
, index
);
236 const struct clk_ops ccu_mp_ops
= {
237 .disable
= ccu_mp_disable
,
238 .enable
= ccu_mp_enable
,
239 .is_enabled
= ccu_mp_is_enabled
,
241 .get_parent
= ccu_mp_get_parent
,
242 .set_parent
= ccu_mp_set_parent
,
244 .determine_rate
= ccu_mp_determine_rate
,
245 .recalc_rate
= ccu_mp_recalc_rate
,
246 .set_rate
= ccu_mp_set_rate
,
/*
 * Support for MMC timing mode switching
 *
 * The MMC clocks on some SoCs support switching between old and
 * new timing modes. A platform specific API is provided to query
 * and set the timing mode on supported SoCs.
 *
 * In addition, a special class of ccu_mp_ops is provided, which
 * takes in to account the timing mode switch. When the new timing
 * mode is active, the clock output rate is halved. This new class
 * is a wrapper around the generic ccu_mp_ops. When clock rates
 * are passed through to ccu_mp_ops callbacks, they are doubled
 * if the new timing mode bit is set, to account for the post
 * divider. Conversely, when clock rates are passed back, they
 * are halved if the mode bit is set.
 */
266 static unsigned long ccu_mp_mmc_recalc_rate(struct clk_hw
*hw
,
267 unsigned long parent_rate
)
269 unsigned long rate
= ccu_mp_recalc_rate(hw
, parent_rate
);
270 struct ccu_common
*cm
= hw_to_ccu_common(hw
);
271 u32 val
= readl(cm
->base
+ cm
->reg
);
273 if (val
& CCU_MMC_NEW_TIMING_MODE
)
278 static int ccu_mp_mmc_determine_rate(struct clk_hw
*hw
,
279 struct clk_rate_request
*req
)
281 struct ccu_common
*cm
= hw_to_ccu_common(hw
);
282 u32 val
= readl(cm
->base
+ cm
->reg
);
285 /* adjust the requested clock rate */
286 if (val
& CCU_MMC_NEW_TIMING_MODE
) {
292 ret
= ccu_mp_determine_rate(hw
, req
);
294 /* re-adjust the requested clock rate back */
295 if (val
& CCU_MMC_NEW_TIMING_MODE
) {
304 static int ccu_mp_mmc_set_rate(struct clk_hw
*hw
, unsigned long rate
,
305 unsigned long parent_rate
)
307 struct ccu_common
*cm
= hw_to_ccu_common(hw
);
308 u32 val
= readl(cm
->base
+ cm
->reg
);
310 if (val
& CCU_MMC_NEW_TIMING_MODE
)
313 return ccu_mp_set_rate(hw
, rate
, parent_rate
);
316 const struct clk_ops ccu_mp_mmc_ops
= {
317 .disable
= ccu_mp_disable
,
318 .enable
= ccu_mp_enable
,
319 .is_enabled
= ccu_mp_is_enabled
,
321 .get_parent
= ccu_mp_get_parent
,
322 .set_parent
= ccu_mp_set_parent
,
324 .determine_rate
= ccu_mp_mmc_determine_rate
,
325 .recalc_rate
= ccu_mp_mmc_recalc_rate
,
326 .set_rate
= ccu_mp_mmc_set_rate
,