// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2016 Maxime Ripard
 * Maxime Ripard <maxime.ripard@free-electrons.com>
 */

#include <linux/clk-provider.h>
#include <linux/io.h>

#include "ccu_gate.h"
#include "ccu_mp.h"

static unsigned long ccu_mp_find_best(unsigned long parent, unsigned long rate,
				      unsigned int max_m, unsigned int max_p,
				      unsigned int *m, unsigned int *p)
{
	unsigned long best_rate = 0;
	unsigned int best_m = 0, best_p = 0;
	unsigned int _m, _p;

	for (_p = 1; _p <= max_p; _p <<= 1) {
		for (_m = 1; _m <= max_m; _m++) {
			unsigned long tmp_rate = parent / _p / _m;

			/* Skip candidates above the requested rate */
			if (tmp_rate > rate)
				continue;

			if ((rate - tmp_rate) < (rate - best_rate)) {
				best_rate = tmp_rate;
				best_m = _m;
				best_p = _p;
			}
		}
	}

	*m = best_m;
	*p = best_p;

	return best_rate;
}

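/*
 * For example (illustrative figures): with a 24 MHz parent, max_m = 16 and
 * max_p = 4, a request for 6 MHz is matched exactly on the first pass of
 * the outer loop (_p = 1, _m = 4 gives 24 / 1 / 4 = 6 MHz), so the search
 * settles on m = 4, p = 1 and returns 6000000.
 */
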
static unsigned long ccu_mp_find_best_with_parent_adj(struct clk_hw *hw,
						       unsigned long *parent,
						       unsigned long rate,
						       unsigned int max_m,
						       unsigned int max_p)
{
	unsigned long parent_rate_saved;
	unsigned long parent_rate, now;
	unsigned long best_rate = 0;
	unsigned int _m, _p, div;
	unsigned long maxdiv;

	parent_rate_saved = *parent;

	/*
	 * The maximum divider we can use without overflowing
	 * unsigned long in rate * m * p below
	 */
	maxdiv = max_m * max_p;
	maxdiv = min(ULONG_MAX / rate, maxdiv);

	for (_p = 1; _p <= max_p; _p <<= 1) {
		for (_m = 1; _m <= max_m; _m++) {
			div = _m * _p;

			if (div > maxdiv)
				break;

			if (rate * div == parent_rate_saved) {
				/*
				 * It's the most ideal case if the requested
				 * rate can be divided from parent clock without
				 * needing to change parent rate, so return the
				 * divider immediately.
				 */
				*parent = parent_rate_saved;
				return rate;
			}

			parent_rate = clk_hw_round_rate(hw, rate * div);
			now = parent_rate / div;

			if (now <= rate && now > best_rate) {
				best_rate = now;
				*parent = parent_rate;
			}
		}
	}

	return best_rate;
}

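/*
 * Note on the overflow guard above: rate * div is evaluated for every
 * candidate divider, so the combined divider is capped at ULONG_MAX / rate.
 * As an illustration, on a 32-bit machine a 100 MHz request limits the
 * combined divider to ULONG_MAX / 100000000 = 42.
 */
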
static unsigned long ccu_mp_round_rate(struct ccu_mux_internal *mux,
				       struct clk_hw *hw,
				       unsigned long *parent_rate,
				       unsigned long rate,
				       void *data)
{
	struct ccu_mp *cmp = data;
	unsigned int max_m, max_p;
	unsigned int m, p;

	if (cmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate *= cmp->fixed_post_div;

	max_m = cmp->m.max ?: 1 << cmp->m.width;
	max_p = cmp->p.max ?: 1 << ((1 << cmp->p.width) - 1);

	if (!clk_hw_can_set_rate_parent(&cmp->common.hw)) {
		rate = ccu_mp_find_best(*parent_rate, rate, max_m, max_p,
					&m, &p);
	} else {
		rate = ccu_mp_find_best_with_parent_adj(hw, parent_rate, rate,
							max_m, max_p);
	}

	if (cmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate /= cmp->fixed_post_div;

	return rate;
}

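/*
 * Note on the divider limits used above: the M field holds the divider
 * minus its offset, so its maximum is 1 << width unless a smaller
 * cmp->m.max is given. The P field stores log2 of a power-of-two divider,
 * so a field of width w allows dividers up to 1 << ((1 << w) - 1); e.g.
 * a 2-bit P field covers the dividers 1, 2, 4 and 8.
 */
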
static void ccu_mp_disable(struct clk_hw *hw)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);

	return ccu_gate_helper_disable(&cmp->common, cmp->enable);
}

static int ccu_mp_enable(struct clk_hw *hw)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);

	return ccu_gate_helper_enable(&cmp->common, cmp->enable);
}

static int ccu_mp_is_enabled(struct clk_hw *hw)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);

	return ccu_gate_helper_is_enabled(&cmp->common, cmp->enable);
}

static unsigned long ccu_mp_recalc_rate(struct clk_hw *hw,
					unsigned long parent_rate)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);
	unsigned long rate;
	unsigned int m, p;
	u32 reg;

	/* Adjust parent_rate according to pre-dividers */
	parent_rate = ccu_mux_helper_apply_prediv(&cmp->common, &cmp->mux, -1,
						  parent_rate);

	reg = readl(cmp->common.base + cmp->common.reg);

	m = reg >> cmp->m.shift;
	m &= (1 << cmp->m.width) - 1;
	m += cmp->m.offset;
	if (!m)
		m++;

	p = reg >> cmp->p.shift;
	p &= (1 << cmp->p.width) - 1;

	rate = (parent_rate >> p) / m;
	if (cmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate /= cmp->fixed_post_div;

	return rate;
}

static int ccu_mp_determine_rate(struct clk_hw *hw,
				 struct clk_rate_request *req)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);

	return ccu_mux_helper_determine_rate(&cmp->common, &cmp->mux,
					     req, ccu_mp_round_rate, cmp);
}

static int ccu_mp_set_rate(struct clk_hw *hw, unsigned long rate,
			   unsigned long parent_rate)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);
	unsigned long flags;
	unsigned int max_m, max_p;
	unsigned int m, p;
	u32 reg;

	/* Adjust parent_rate according to pre-dividers */
	parent_rate = ccu_mux_helper_apply_prediv(&cmp->common, &cmp->mux, -1,
						  parent_rate);

	max_m = cmp->m.max ?: 1 << cmp->m.width;
	max_p = cmp->p.max ?: 1 << ((1 << cmp->p.width) - 1);

	/* Adjust target rate according to post-dividers */
	if (cmp->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate = rate * cmp->fixed_post_div;

	ccu_mp_find_best(parent_rate, rate, max_m, max_p, &m, &p);

	spin_lock_irqsave(cmp->common.lock, flags);

	/* Read-modify-write: clear the M and P fields, then program them */
	reg = readl(cmp->common.base + cmp->common.reg);
	reg &= ~GENMASK(cmp->m.width + cmp->m.shift - 1, cmp->m.shift);
	reg &= ~GENMASK(cmp->p.width + cmp->p.shift - 1, cmp->p.shift);
	reg |= (m - cmp->m.offset) << cmp->m.shift;
	reg |= ilog2(p) << cmp->p.shift;

	writel(reg, cmp->common.base + cmp->common.reg);

	spin_unlock_irqrestore(cmp->common.lock, flags);

	return 0;
}

static u8 ccu_mp_get_parent(struct clk_hw *hw)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);

	return ccu_mux_helper_get_parent(&cmp->common, &cmp->mux);
}

static int ccu_mp_set_parent(struct clk_hw *hw, u8 index)
{
	struct ccu_mp *cmp = hw_to_ccu_mp(hw);

	return ccu_mux_helper_set_parent(&cmp->common, &cmp->mux, index);
}

const struct clk_ops ccu_mp_ops = {
	.disable	= ccu_mp_disable,
	.enable		= ccu_mp_enable,
	.is_enabled	= ccu_mp_is_enabled,

	.get_parent	= ccu_mp_get_parent,
	.set_parent	= ccu_mp_set_parent,

	.determine_rate	= ccu_mp_determine_rate,
	.recalc_rate	= ccu_mp_recalc_rate,
	.set_rate	= ccu_mp_set_rate,
};
EXPORT_SYMBOL_NS_GPL(ccu_mp_ops, "SUNXI_CCU");

/*
 * Support for MMC timing mode switching
 *
 * The MMC clocks on some SoCs support switching between old and
 * new timing modes. A platform specific API is provided to query
 * and set the timing mode on supported SoCs.
 *
 * In addition, a special class of ccu_mp_ops is provided, which
 * takes into account the timing mode switch. When the new timing
 * mode is active, the clock output rate is halved. This new class
 * is a wrapper around the generic ccu_mp_ops. When clock rates
 * are passed through to ccu_mp_ops callbacks, they are doubled
 * if the new timing mode bit is set, to account for the post
 * divider. Conversely, when clock rates are passed back, they
 * are halved if the mode bit is set.
 */

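/*
 * For example (illustrative figures): with the new timing mode bit set,
 * an MMC driver request for 52 MHz is presented to the generic MP logic
 * as 104 MHz. The 104 MHz divider output is then halved by the mode's
 * post-divider, so the consumer sees 52 MHz again, which is also what
 * ccu_mp_mmc_recalc_rate() reports.
 */
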
static unsigned long ccu_mp_mmc_recalc_rate(struct clk_hw *hw,
					    unsigned long parent_rate)
{
	unsigned long rate = ccu_mp_recalc_rate(hw, parent_rate);
	struct ccu_common *cm = hw_to_ccu_common(hw);
	u32 val = readl(cm->base + cm->reg);

	if (val & CCU_MMC_NEW_TIMING_MODE)
		return rate / 2;

	return rate;
}

static int ccu_mp_mmc_determine_rate(struct clk_hw *hw,
				     struct clk_rate_request *req)
{
	struct ccu_common *cm = hw_to_ccu_common(hw);
	u32 val = readl(cm->base + cm->reg);
	int ret;

	/* adjust the requested clock rate */
	if (val & CCU_MMC_NEW_TIMING_MODE) {
		req->rate *= 2;
		req->min_rate *= 2;
		req->max_rate *= 2;
	}

	ret = ccu_mp_determine_rate(hw, req);

	/* re-adjust the requested clock rate back */
	if (val & CCU_MMC_NEW_TIMING_MODE) {
		req->rate /= 2;
		req->min_rate /= 2;
		req->max_rate /= 2;
	}

	return ret;
}

static int ccu_mp_mmc_set_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long parent_rate)
{
	struct ccu_common *cm = hw_to_ccu_common(hw);
	u32 val = readl(cm->base + cm->reg);

	if (val & CCU_MMC_NEW_TIMING_MODE)
		rate *= 2;

	return ccu_mp_set_rate(hw, rate, parent_rate);
}

const struct clk_ops ccu_mp_mmc_ops = {
	.disable	= ccu_mp_disable,
	.enable		= ccu_mp_enable,
	.is_enabled	= ccu_mp_is_enabled,

	.get_parent	= ccu_mp_get_parent,
	.set_parent	= ccu_mp_set_parent,

	.determine_rate	= ccu_mp_mmc_determine_rate,
	.recalc_rate	= ccu_mp_mmc_recalc_rate,
	.set_rate	= ccu_mp_mmc_set_rate,
};
EXPORT_SYMBOL_NS_GPL(ccu_mp_mmc_ops, "SUNXI_CCU");