// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2023 Inochi Amaoto <inochiama@outlook.com>
 */

#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/gcd.h>
#include <linux/spinlock.h>

#include "clk-cv18xx-ip.h"
/* GATE */
static inline struct cv1800_clk_gate *hw_to_cv1800_clk_gate(struct clk_hw *hw)
{
	struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

	return container_of(common, struct cv1800_clk_gate, common);
}

static int gate_enable(struct clk_hw *hw)
{
	struct cv1800_clk_gate *gate = hw_to_cv1800_clk_gate(hw);

	return cv1800_clk_setbit(&gate->common, &gate->gate);
}

static void gate_disable(struct clk_hw *hw)
{
	struct cv1800_clk_gate *gate = hw_to_cv1800_clk_gate(hw);

	cv1800_clk_clearbit(&gate->common, &gate->gate);
}

static int gate_is_enabled(struct clk_hw *hw)
{
	struct cv1800_clk_gate *gate = hw_to_cv1800_clk_gate(hw);

	return cv1800_clk_checkbit(&gate->common, &gate->gate);
}

static unsigned long gate_recalc_rate(struct clk_hw *hw,
				      unsigned long parent_rate)
{
	return parent_rate;
}

static long gate_round_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long *parent_rate)
{
	return *parent_rate;
}

static int gate_set_rate(struct clk_hw *hw, unsigned long rate,
			 unsigned long parent_rate)
{
	return 0;
}

const struct clk_ops cv1800_clk_gate_ops = {
	.disable = gate_disable,
	.enable = gate_enable,
	.is_enabled = gate_is_enabled,

	.recalc_rate = gate_recalc_rate,
	.round_rate = gate_round_rate,
	.set_rate = gate_set_rate,
};
/* DIV */
#define _DIV_EN_CLK_DIV_FACTOR_FIELD	BIT(3)

#define DIV_GET_EN_CLK_DIV_FACTOR(_reg) \
	FIELD_GET(_DIV_EN_CLK_DIV_FACTOR_FIELD, _reg)

#define DIV_SET_EN_DIV_FACTOR(_reg) \
	_CV1800_SET_FIELD(_reg, 1, _DIV_EN_CLK_DIV_FACTOR_FIELD)
static inline struct cv1800_clk_div *hw_to_cv1800_clk_div(struct clk_hw *hw)
{
	struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

	return container_of(common, struct cv1800_clk_div, common);
}

static int div_enable(struct clk_hw *hw)
{
	struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

	return cv1800_clk_setbit(&div->common, &div->gate);
}

static void div_disable(struct clk_hw *hw)
{
	struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

	cv1800_clk_clearbit(&div->common, &div->gate);
}

static int div_is_enabled(struct clk_hw *hw)
{
	struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

	return cv1800_clk_checkbit(&div->common, &div->gate);
}

static int div_helper_set_rate(struct cv1800_clk_common *common,
			       struct cv1800_clk_regfield *div,
			       unsigned long val)
{
	unsigned long flags;
	u32 reg;

	if (div->width == 0)
		return 0;

	spin_lock_irqsave(common->lock, flags);

	reg = readl(common->base + div->reg);
	reg = cv1800_clk_regfield_set(reg, val, div);
	if (div->initval > 0)
		reg = DIV_SET_EN_DIV_FACTOR(reg);

	writel(reg, common->base + div->reg);

	spin_unlock_irqrestore(common->lock, flags);

	return 0;
}

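/*
 * Note (added): per the checks in the helper below, a regfield with
 * width == 0 appears to describe a fixed divider given by initval (or no
 * divider at all when initval <= 0). For a programmable field, the register
 * value is used when initval == 0 or when the EN_CLK_DIV_FACTOR bit is set;
 * otherwise initval presumably reflects the hardware's default divider.
 */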
static u32 div_helper_get_clockdiv(struct cv1800_clk_common *common,
				   struct cv1800_clk_regfield *div)
{
	u32 clockdiv = 1;
	u32 reg;

	if (!div || div->initval < 0 || (div->width == 0 && div->initval <= 0))
		return 1;

	if (div->width == 0 && div->initval > 0)
		return div->initval;

	reg = readl(common->base + div->reg);

	if (div->initval == 0 || DIV_GET_EN_CLK_DIV_FACTOR(reg))
		clockdiv = cv1800_clk_regfield_get(reg, div);
	else if (div->initval > 0)
		clockdiv = div->initval;

	return clockdiv;
}

static u32 div_helper_round_rate(struct cv1800_clk_regfield *div,
				 struct clk_hw *hw, struct clk_hw *parent,
				 unsigned long rate, unsigned long *prate)
{
	if (div->width == 0) {
		if (div->initval <= 0)
			return DIV_ROUND_UP_ULL(*prate, 1);
		else
			return DIV_ROUND_UP_ULL(*prate, div->initval);
	}

	return divider_round_rate_parent(hw, parent, rate, prate, NULL,
					 div->width, div->flags);
}
static long div_round_rate(struct clk_hw *parent, unsigned long *parent_rate,
			   unsigned long rate, int id, void *data)
{
	struct cv1800_clk_div *div = data;

	return div_helper_round_rate(&div->div, &div->common.hw, parent,
				     rate, parent_rate);
}

static bool div_is_better_rate(struct cv1800_clk_common *common,
			       unsigned long target, unsigned long now,
			       unsigned long best)
{
	if (common->features & CLK_DIVIDER_ROUND_CLOSEST)
		return abs_diff(target, now) < abs_diff(target, best);

	return now <= target && now > best;
}

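/*
 * Note (added): the helper below walks every candidate parent, rounds the
 * requested rate against each one through the supplied callback, and keeps
 * the best candidate according to div_is_better_rate(). When
 * CLK_SET_RATE_NO_REPARENT is set, only the current parent is considered.
 */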
static int mux_helper_determine_rate(struct cv1800_clk_common *common,
				     struct clk_rate_request *req,
				     long (*round)(struct clk_hw *,
						   unsigned long *,
						   unsigned long,
						   int,
						   void *),
				     void *data)
{
	unsigned long best_parent_rate = 0, best_rate = 0;
	struct clk_hw *best_parent, *hw = &common->hw;
	unsigned int i;

	if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
		unsigned long adj_parent_rate;

		best_parent = clk_hw_get_parent(hw);
		best_parent_rate = clk_hw_get_rate(best_parent);
		/* start rounding from the current parent rate */
		adj_parent_rate = best_parent_rate;

		best_rate = round(best_parent, &adj_parent_rate,
				  req->rate, -1, data);

		goto find;
	}

	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		unsigned long tmp_rate, parent_rate;
		struct clk_hw *parent;

		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);

		tmp_rate = round(parent, &parent_rate, req->rate, i, data);

		if (tmp_rate == req->rate) {
			best_parent = parent;
			best_parent_rate = parent_rate;
			best_rate = tmp_rate;
			goto find;
		}

		if (div_is_better_rate(common, req->rate,
				       tmp_rate, best_rate)) {
			best_parent = parent;
			best_parent_rate = parent_rate;
			best_rate = tmp_rate;
		}
	}

	if (best_rate == 0)
		return -EINVAL;

find:
	req->best_parent_hw = best_parent;
	req->best_parent_rate = best_parent_rate;
	req->rate = best_rate;
	return 0;
}
static int div_determine_rate(struct clk_hw *hw,
			      struct clk_rate_request *req)
{
	struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

	return mux_helper_determine_rate(&div->common, req,
					 div_round_rate, div);
}

static unsigned long div_recalc_rate(struct clk_hw *hw,
				     unsigned long parent_rate)
{
	struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);
	unsigned long val;

	val = div_helper_get_clockdiv(&div->common, &div->div);
	if (val == 0)
		return 0;

	return divider_recalc_rate(hw, parent_rate, val, NULL,
				   div->div.flags, div->div.width);
}

static int div_set_rate(struct clk_hw *hw, unsigned long rate,
			unsigned long parent_rate)
{
	struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);
	unsigned long val;

	val = divider_get_val(rate, parent_rate, NULL,
			      div->div.width, div->div.flags);

	return div_helper_set_rate(&div->common, &div->div, val);
}

const struct clk_ops cv1800_clk_div_ops = {
	.disable = div_disable,
	.enable = div_enable,
	.is_enabled = div_is_enabled,

	.determine_rate = div_determine_rate,
	.recalc_rate = div_recalc_rate,
	.set_rate = div_set_rate,
};

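/*
 * Note (added): for the bypass divider below, parent index 0 selects the
 * bypass path (the parent rate is passed through unchanged) and index 1
 * selects the divided path, which is why the rounding callback shifts the
 * parent id by one before reusing the plain divider helpers.
 */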
static inline struct cv1800_clk_bypass_div *
hw_to_cv1800_clk_bypass_div(struct clk_hw *hw)
{
	struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

	return container_of(div, struct cv1800_clk_bypass_div, div);
}

static long bypass_div_round_rate(struct clk_hw *parent,
				  unsigned long *parent_rate,
				  unsigned long rate, int id, void *data)
{
	struct cv1800_clk_bypass_div *div = data;

	if (id == -1) {
		if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
			return *parent_rate;
		else
			return div_round_rate(parent, parent_rate, rate,
					      -1, &div->div);
	}

	if (id == 0)
		return *parent_rate;

	return div_round_rate(parent, parent_rate, rate, id - 1, &div->div);
}

static int bypass_div_determine_rate(struct clk_hw *hw,
				     struct clk_rate_request *req)
{
	struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

	return mux_helper_determine_rate(&div->div.common, req,
					 bypass_div_round_rate, div);
}

static unsigned long bypass_div_recalc_rate(struct clk_hw *hw,
					    unsigned long parent_rate)
{
	struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

	if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
		return parent_rate;

	return div_recalc_rate(hw, parent_rate);
}

static int bypass_div_set_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long parent_rate)
{
	struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

	if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
		return 0;

	return div_set_rate(hw, rate, parent_rate);
}

static u8 bypass_div_get_parent(struct clk_hw *hw)
{
	struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

	if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
		return 0;

	return 1;
}

static int bypass_div_set_parent(struct clk_hw *hw, u8 index)
{
	struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

	if (index)
		return cv1800_clk_clearbit(&div->div.common, &div->bypass);

	return cv1800_clk_setbit(&div->div.common, &div->bypass);
}

const struct clk_ops cv1800_clk_bypass_div_ops = {
	.disable = div_disable,
	.enable = div_enable,
	.is_enabled = div_is_enabled,

	.determine_rate = bypass_div_determine_rate,
	.recalc_rate = bypass_div_recalc_rate,
	.set_rate = bypass_div_set_rate,

	.set_parent = bypass_div_set_parent,
	.get_parent = bypass_div_get_parent,
};

/* MUX */
static inline struct cv1800_clk_mux *hw_to_cv1800_clk_mux(struct clk_hw *hw)
{
	struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

	return container_of(common, struct cv1800_clk_mux, common);
}

static int mux_enable(struct clk_hw *hw)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

	return cv1800_clk_setbit(&mux->common, &mux->gate);
}

static void mux_disable(struct clk_hw *hw)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

	cv1800_clk_clearbit(&mux->common, &mux->gate);
}

static int mux_is_enabled(struct clk_hw *hw)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

	return cv1800_clk_checkbit(&mux->common, &mux->gate);
}

static long mux_round_rate(struct clk_hw *parent, unsigned long *parent_rate,
			   unsigned long rate, int id, void *data)
{
	struct cv1800_clk_mux *mux = data;

	return div_helper_round_rate(&mux->div, &mux->common.hw, parent,
				     rate, parent_rate);
}

static int mux_determine_rate(struct clk_hw *hw,
			      struct clk_rate_request *req)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

	return mux_helper_determine_rate(&mux->common, req,
					 mux_round_rate, mux);
}

static unsigned long mux_recalc_rate(struct clk_hw *hw,
				     unsigned long parent_rate)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
	unsigned long val;

	val = div_helper_get_clockdiv(&mux->common, &mux->div);
	if (val == 0)
		return 0;

	return divider_recalc_rate(hw, parent_rate, val, NULL,
				   mux->div.flags, mux->div.width);
}

static int mux_set_rate(struct clk_hw *hw, unsigned long rate,
			unsigned long parent_rate)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
	unsigned long val;

	val = divider_get_val(rate, parent_rate, NULL,
			      mux->div.width, mux->div.flags);

	return div_helper_set_rate(&mux->common, &mux->div, val);
}

static u8 mux_get_parent(struct clk_hw *hw)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
	u32 reg = readl(mux->common.base + mux->mux.reg);

	return cv1800_clk_regfield_get(reg, &mux->mux);
}

static int _mux_set_parent(struct cv1800_clk_mux *mux, u8 index)
{
	u32 reg;

	reg = readl(mux->common.base + mux->mux.reg);
	reg = cv1800_clk_regfield_set(reg, index, &mux->mux);
	writel(reg, mux->common.base + mux->mux.reg);

	return 0;
}

static int mux_set_parent(struct clk_hw *hw, u8 index)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
	unsigned long flags;

	spin_lock_irqsave(mux->common.lock, flags);

	_mux_set_parent(mux, index);

	spin_unlock_irqrestore(mux->common.lock, flags);

	return 0;
}

const struct clk_ops cv1800_clk_mux_ops = {
	.disable = mux_disable,
	.enable = mux_enable,
	.is_enabled = mux_is_enabled,

	.determine_rate = mux_determine_rate,
	.recalc_rate = mux_recalc_rate,
	.set_rate = mux_set_rate,

	.set_parent = mux_set_parent,
	.get_parent = mux_get_parent,
};

static inline struct cv1800_clk_bypass_mux *
hw_to_cv1800_clk_bypass_mux(struct clk_hw *hw)
{
	struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

	return container_of(mux, struct cv1800_clk_bypass_mux, mux);
}

static long bypass_mux_round_rate(struct clk_hw *parent,
				  unsigned long *parent_rate,
				  unsigned long rate, int id, void *data)
{
	struct cv1800_clk_bypass_mux *mux = data;

	if (id == -1) {
		if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
			return *parent_rate;
		else
			return mux_round_rate(parent, parent_rate, rate,
					      -1, &mux->mux);
	}

	if (id == 0)
		return *parent_rate;

	return mux_round_rate(parent, parent_rate, rate, id - 1, &mux->mux);
}

static int bypass_mux_determine_rate(struct clk_hw *hw,
				     struct clk_rate_request *req)
{
	struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

	return mux_helper_determine_rate(&mux->mux.common, req,
					 bypass_mux_round_rate, mux);
}

static unsigned long bypass_mux_recalc_rate(struct clk_hw *hw,
					    unsigned long parent_rate)
{
	struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

	if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
		return parent_rate;

	return mux_recalc_rate(hw, parent_rate);
}

static int bypass_mux_set_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long parent_rate)
{
	struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

	if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
		return 0;

	return mux_set_rate(hw, rate, parent_rate);
}

static u8 bypass_mux_get_parent(struct clk_hw *hw)
{
	struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

	if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
		return 0;

	return mux_get_parent(hw) + 1;
}

static int bypass_mux_set_parent(struct clk_hw *hw, u8 index)
{
	struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

	if (index == 0)
		return cv1800_clk_setbit(&mux->mux.common, &mux->bypass);

	return cv1800_clk_clearbit(&mux->mux.common, &mux->bypass);
}

const struct clk_ops cv1800_clk_bypass_mux_ops = {
	.disable = mux_disable,
	.enable = mux_enable,
	.is_enabled = mux_is_enabled,

	.determine_rate = bypass_mux_determine_rate,
	.recalc_rate = bypass_mux_recalc_rate,
	.set_rate = bypass_mux_set_rate,

	.set_parent = bypass_mux_set_parent,
	.get_parent = bypass_mux_get_parent,
};

/* MMUX */
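/*
 * Note (added): an MMUX pairs two alternative mux/divider register sets,
 * chosen by the clk_sel bit. parent2sel[] appears to map a clk core parent
 * index to the register set that can reach it (-1 meaning the parent is only
 * reachable through the bypass path), while sel2parent[] maps a selector
 * value in a given set back to a parent index; parent index 0 is the bypass
 * parent.
 */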
static inline struct cv1800_clk_mmux *hw_to_cv1800_clk_mmux(struct clk_hw *hw)
{
	struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

	return container_of(common, struct cv1800_clk_mmux, common);
}

static u8 mmux_get_parent_id(struct cv1800_clk_mmux *mmux)
{
	struct clk_hw *hw = &mmux->common.hw;
	struct clk_hw *parent = clk_hw_get_parent(hw);
	unsigned int i;

	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		if (parent == clk_hw_get_parent_by_index(hw, i))
			return i;
	}

	BUG();
}

static int mmux_enable(struct clk_hw *hw)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

	return cv1800_clk_setbit(&mmux->common, &mmux->gate);
}

static void mmux_disable(struct clk_hw *hw)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

	cv1800_clk_clearbit(&mmux->common, &mmux->gate);
}

static int mmux_is_enabled(struct clk_hw *hw)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

	return cv1800_clk_checkbit(&mmux->common, &mmux->gate);
}

static long mmux_round_rate(struct clk_hw *parent, unsigned long *parent_rate,
			    unsigned long rate, int id, void *data)
{
	struct cv1800_clk_mmux *mmux = data;
	s8 div_id;

	if (id == -1) {
		if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
			return *parent_rate;

		id = mmux_get_parent_id(mmux);
	}

	div_id = mmux->parent2sel[id];

	if (div_id < 0)
		return *parent_rate;

	return div_helper_round_rate(&mmux->div[div_id],
				     &mmux->common.hw, parent,
				     rate, parent_rate);
}

static int mmux_determine_rate(struct clk_hw *hw,
			       struct clk_rate_request *req)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

	return mux_helper_determine_rate(&mmux->common, req,
					 mmux_round_rate, mmux);
}

static unsigned long mmux_recalc_rate(struct clk_hw *hw,
				      unsigned long parent_rate)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
	unsigned long val;
	struct cv1800_clk_regfield *div;

	if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
		return parent_rate;

	if (cv1800_clk_checkbit(&mmux->common, &mmux->clk_sel))
		div = &mmux->div[0];
	else
		div = &mmux->div[1];

	val = div_helper_get_clockdiv(&mmux->common, div);
	if (val == 0)
		return 0;

	return divider_recalc_rate(hw, parent_rate, val, NULL,
				   div->flags, div->width);
}

static int mmux_set_rate(struct clk_hw *hw, unsigned long rate,
			 unsigned long parent_rate)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
	struct cv1800_clk_regfield *div;
	unsigned long val;

	if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
		return parent_rate;

	if (cv1800_clk_checkbit(&mmux->common, &mmux->clk_sel))
		div = &mmux->div[0];
	else
		div = &mmux->div[1];

	val = divider_get_val(rate, parent_rate, NULL,
			      div->width, div->flags);

	return div_helper_set_rate(&mmux->common, div, val);
}

static u8 mmux_get_parent(struct clk_hw *hw)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
	struct cv1800_clk_regfield *mux;
	u32 reg;
	s8 clk_sel;

	if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
		return 0;

	if (cv1800_clk_checkbit(&mmux->common, &mmux->clk_sel))
		clk_sel = 0;
	else
		clk_sel = 1;
	mux = &mmux->mux[clk_sel];

	reg = readl(mmux->common.base + mux->reg);

	return mmux->sel2parent[clk_sel][cv1800_clk_regfield_get(reg, mux)];
}

static int mmux_set_parent(struct clk_hw *hw, u8 index)
{
	struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
	struct cv1800_clk_regfield *mux;
	unsigned long flags;
	u32 reg;
	s8 clk_sel = mmux->parent2sel[index];

	if (index == 0 || clk_sel == -1) {
		cv1800_clk_setbit(&mmux->common, &mmux->bypass);
		goto release;
	}

	cv1800_clk_clearbit(&mmux->common, &mmux->bypass);

	if (clk_sel)
		cv1800_clk_clearbit(&mmux->common, &mmux->clk_sel);
	else
		cv1800_clk_setbit(&mmux->common, &mmux->clk_sel);

	spin_lock_irqsave(mmux->common.lock, flags);

	mux = &mmux->mux[clk_sel];
	reg = readl(mmux->common.base + mux->reg);
	reg = cv1800_clk_regfield_set(reg, index, mux);

	writel(reg, mmux->common.base + mux->reg);

	spin_unlock_irqrestore(mmux->common.lock, flags);

release:
	return 0;
}

const struct clk_ops cv1800_clk_mmux_ops = {
	.disable = mmux_disable,
	.enable = mmux_enable,
	.is_enabled = mmux_is_enabled,

	.determine_rate = mmux_determine_rate,
	.recalc_rate = mmux_recalc_rate,
	.set_rate = mmux_set_rate,

	.set_parent = mmux_set_parent,
	.get_parent = mmux_get_parent,
};

/* AUDIO CLK */
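/*
 * Note (added): from aclk_recalc_rate()/aclk_determine_mn() below, the audio
 * clock output appears to follow rate = parent_rate * N / (2 * M); the M/N
 * pair is derived by reducing parent_rate / 2 and the target rate by their
 * greatest common divisor.
 */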
static inline struct cv1800_clk_audio *
hw_to_cv1800_clk_audio(struct clk_hw *hw)
{
	struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

	return container_of(common, struct cv1800_clk_audio, common);
}

static int aclk_enable(struct clk_hw *hw)
{
	struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

	cv1800_clk_setbit(&aclk->common, &aclk->src_en);
	return cv1800_clk_setbit(&aclk->common, &aclk->output_en);
}

static void aclk_disable(struct clk_hw *hw)
{
	struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

	cv1800_clk_clearbit(&aclk->common, &aclk->output_en);
	cv1800_clk_clearbit(&aclk->common, &aclk->src_en);
}

static int aclk_is_enabled(struct clk_hw *hw)
{
	struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

	return cv1800_clk_checkbit(&aclk->common, &aclk->output_en);
}

static int aclk_determine_rate(struct clk_hw *hw,
			       struct clk_rate_request *req)
{
	struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

	req->rate = aclk->target_rate;

	return 0;
}

static unsigned long aclk_recalc_rate(struct clk_hw *hw,
				      unsigned long parent_rate)
{
	struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);
	u64 rate = parent_rate;
	u64 factor = 2;
	u32 regval;

	if (!cv1800_clk_checkbit(&aclk->common, &aclk->div_en))
		return 0;

	regval = readl(aclk->common.base + aclk->m.reg);
	factor *= cv1800_clk_regfield_get(regval, &aclk->m);

	regval = readl(aclk->common.base + aclk->n.reg);
	rate *= cv1800_clk_regfield_get(regval, &aclk->n);

	return DIV64_U64_ROUND_UP(rate, factor);
}

static void aclk_determine_mn(unsigned long parent_rate, unsigned long rate,
			      u32 *m, u32 *n)
{
	u32 tm = parent_rate / 2;
	u32 tn = rate;
	u32 tcommon = gcd(tm, tn);
	*m = tm / tcommon;
	*n = tn / tcommon;
}

static int aclk_set_rate(struct clk_hw *hw, unsigned long rate,
			 unsigned long parent_rate)
{
	struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);
	unsigned long flags;
	u32 m, n;

	aclk_determine_mn(parent_rate, rate,
			  &m, &n);

	spin_lock_irqsave(aclk->common.lock, flags);

	writel(m, aclk->common.base + aclk->m.reg);
	writel(n, aclk->common.base + aclk->n.reg);

	cv1800_clk_setbit(&aclk->common, &aclk->div_en);
	cv1800_clk_setbit(&aclk->common, &aclk->div_up);

	spin_unlock_irqrestore(aclk->common.lock, flags);

	return 0;
}

const struct clk_ops cv1800_clk_audio_ops = {
	.disable = aclk_disable,
	.enable = aclk_enable,
	.is_enabled = aclk_is_enabled,

	.determine_rate = aclk_determine_rate,
	.recalc_rate = aclk_recalc_rate,
	.set_rate = aclk_set_rate,
};