/* drivers/clk/meson/sclk-div.c — Amlogic Meson sample clock generator divider */
// SPDX-License-Identifier: (GPL-2.0 OR MIT)
/*
 * Copyright (c) 2018 BayLibre, SAS.
 * Author: Jerome Brunet <jbrunet@baylibre.com>
 *
 * Sample clock generator divider:
 * This HW divider gates with value 0 but is otherwise a zero based divider:
 *
 * val >= 1
 * divider = val + 1
 *
 * The duty cycle may also be set for the LR clock variant. The duty cycle
 * ratio is:
 *
 * hi = [0 - val]
 * duty_cycle = (1 + hi) / (1 + val)
 */
19 #include "clkc-audio.h"
21 static inline struct meson_sclk_div_data *
22 meson_sclk_div_data(struct clk_regmap *clk)
24 return (struct meson_sclk_div_data *)clk->data;
27 static int sclk_div_maxval(struct meson_sclk_div_data *sclk)
29 return (1 << sclk->div.width) - 1;
/* Largest divide ratio: the field is zero based, so max value + 1. */
static int sclk_div_maxdiv(struct meson_sclk_div_data *sclk)
{
	return 1 + sclk_div_maxval(sclk);
}
37 static int sclk_div_getdiv(struct clk_hw *hw, unsigned long rate,
38 unsigned long prate, int maxdiv)
40 int div = DIV_ROUND_CLOSEST_ULL((u64)prate, rate);
42 return clamp(div, 2, maxdiv);
45 static int sclk_div_bestdiv(struct clk_hw *hw, unsigned long rate,
46 unsigned long *prate,
47 struct meson_sclk_div_data *sclk)
49 struct clk_hw *parent = clk_hw_get_parent(hw);
50 int bestdiv = 0, i;
51 unsigned long maxdiv, now, parent_now;
52 unsigned long best = 0, best_parent = 0;
54 if (!rate)
55 rate = 1;
57 maxdiv = sclk_div_maxdiv(sclk);
59 if (!(clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT))
60 return sclk_div_getdiv(hw, rate, *prate, maxdiv);
63 * The maximum divider we can use without overflowing
64 * unsigned long in rate * i below
66 maxdiv = min(ULONG_MAX / rate, maxdiv);
68 for (i = 2; i <= maxdiv; i++) {
70 * It's the most ideal case if the requested rate can be
71 * divided from parent clock without needing to change
72 * parent rate, so return the divider immediately.
74 if (rate * i == *prate)
75 return i;
77 parent_now = clk_hw_round_rate(parent, rate * i);
78 now = DIV_ROUND_UP_ULL((u64)parent_now, i);
80 if (abs(rate - now) < abs(rate - best)) {
81 bestdiv = i;
82 best = now;
83 best_parent = parent_now;
87 if (!bestdiv)
88 bestdiv = sclk_div_maxdiv(sclk);
89 else
90 *prate = best_parent;
92 return bestdiv;
95 static long sclk_div_round_rate(struct clk_hw *hw, unsigned long rate,
96 unsigned long *prate)
98 struct clk_regmap *clk = to_clk_regmap(hw);
99 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
100 int div;
102 div = sclk_div_bestdiv(hw, rate, prate, sclk);
104 return DIV_ROUND_UP_ULL((u64)*prate, div);
107 static void sclk_apply_ratio(struct clk_regmap *clk,
108 struct meson_sclk_div_data *sclk)
110 unsigned int hi = DIV_ROUND_CLOSEST(sclk->cached_div *
111 sclk->cached_duty.num,
112 sclk->cached_duty.den);
114 if (hi)
115 hi -= 1;
117 meson_parm_write(clk->map, &sclk->hi, hi);
120 static int sclk_div_set_duty_cycle(struct clk_hw *hw,
121 struct clk_duty *duty)
123 struct clk_regmap *clk = to_clk_regmap(hw);
124 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
126 if (MESON_PARM_APPLICABLE(&sclk->hi)) {
127 memcpy(&sclk->cached_duty, duty, sizeof(*duty));
128 sclk_apply_ratio(clk, sclk);
131 return 0;
134 static int sclk_div_get_duty_cycle(struct clk_hw *hw,
135 struct clk_duty *duty)
137 struct clk_regmap *clk = to_clk_regmap(hw);
138 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
139 int hi;
141 if (!MESON_PARM_APPLICABLE(&sclk->hi)) {
142 duty->num = 1;
143 duty->den = 2;
144 return 0;
147 hi = meson_parm_read(clk->map, &sclk->hi);
148 duty->num = hi + 1;
149 duty->den = sclk->cached_div;
150 return 0;
153 static void sclk_apply_divider(struct clk_regmap *clk,
154 struct meson_sclk_div_data *sclk)
156 if (MESON_PARM_APPLICABLE(&sclk->hi))
157 sclk_apply_ratio(clk, sclk);
159 meson_parm_write(clk->map, &sclk->div, sclk->cached_div - 1);
162 static int sclk_div_set_rate(struct clk_hw *hw, unsigned long rate,
163 unsigned long prate)
165 struct clk_regmap *clk = to_clk_regmap(hw);
166 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
167 unsigned long maxdiv = sclk_div_maxdiv(sclk);
169 sclk->cached_div = sclk_div_getdiv(hw, rate, prate, maxdiv);
171 if (clk_hw_is_enabled(hw))
172 sclk_apply_divider(clk, sclk);
174 return 0;
177 static unsigned long sclk_div_recalc_rate(struct clk_hw *hw,
178 unsigned long prate)
180 struct clk_regmap *clk = to_clk_regmap(hw);
181 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
183 return DIV_ROUND_UP_ULL((u64)prate, sclk->cached_div);
/* clk_ops.enable: restore the cached divider, ungating the output. */
static int sclk_div_enable(struct clk_hw *hw)
{
	struct clk_regmap *clk = to_clk_regmap(hw);
	struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);

	sclk_apply_divider(clk, sclk);

	return 0;
}
196 static void sclk_div_disable(struct clk_hw *hw)
198 struct clk_regmap *clk = to_clk_regmap(hw);
199 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
201 meson_parm_write(clk->map, &sclk->div, 0);
204 static int sclk_div_is_enabled(struct clk_hw *hw)
206 struct clk_regmap *clk = to_clk_regmap(hw);
207 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
209 if (meson_parm_read(clk->map, &sclk->div))
210 return 1;
212 return 0;
215 static void sclk_div_init(struct clk_hw *hw)
217 struct clk_regmap *clk = to_clk_regmap(hw);
218 struct meson_sclk_div_data *sclk = meson_sclk_div_data(clk);
219 unsigned int val;
221 val = meson_parm_read(clk->map, &sclk->div);
223 /* if the divider is initially disabled, assume max */
224 if (!val)
225 sclk->cached_div = sclk_div_maxdiv(sclk);
226 else
227 sclk->cached_div = val + 1;
229 sclk_div_get_duty_cycle(hw, &sclk->cached_duty);
232 const struct clk_ops meson_sclk_div_ops = {
233 .recalc_rate = sclk_div_recalc_rate,
234 .round_rate = sclk_div_round_rate,
235 .set_rate = sclk_div_set_rate,
236 .enable = sclk_div_enable,
237 .disable = sclk_div_disable,
238 .is_enabled = sclk_div_is_enabled,
239 .get_duty_cycle = sclk_div_get_duty_cycle,
240 .set_duty_cycle = sclk_div_set_duty_cycle,
241 .init = sclk_div_init,
243 EXPORT_SYMBOL_GPL(meson_sclk_div_ops);