1/*
2 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
3 *
4 * This software is licensed under the terms of the GNU General Public
5 * License version 2, as published by the Free Software Foundation, and
6 * may be copied, distributed, and modified under those terms.
7 *
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11 * GNU General Public License for more details.
12 */
13
14#include <linux/kernel.h>
15#include <linux/bitops.h>
16#include <linux/err.h>
17#include <linux/export.h>
18#include <linux/clk-provider.h>
19#include <linux/regmap.h>
20
21#include <asm/div64.h>
22
23#include "clk-rcg.h"
24#include "common.h"
25
26static u32 ns_to_src(struct src_sel *s, u32 ns)
27{
28	ns >>= s->src_sel_shift;
29	ns &= SRC_SEL_MASK;
30	return ns;
31}
32
33static u32 src_to_ns(struct src_sel *s, u8 src, u32 ns)
34{
35	u32 mask;
36
37	mask = SRC_SEL_MASK;
38	mask <<= s->src_sel_shift;
39	ns &= ~mask;
40
41	ns |= src << s->src_sel_shift;
42	return ns;
43}
44
45static u8 clk_rcg_get_parent(struct clk_hw *hw)
46{
47	struct clk_rcg *rcg = to_clk_rcg(hw);
48	int num_parents = __clk_get_num_parents(hw->clk);
49	u32 ns;
50	int i, ret;
51
52	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
53	if (ret)
54		goto err;
55	ns = ns_to_src(&rcg->s, ns);
56	for (i = 0; i < num_parents; i++)
57		if (ns == rcg->s.parent_map[i].cfg)
58			return i;
59
60err:
61	pr_debug("%s: Clock %s has invalid parent, using default.\n",
62		 __func__, __clk_get_name(hw->clk));
63	return 0;
64}
65
66static int reg_to_bank(struct clk_dyn_rcg *rcg, u32 bank)
67{
68	bank &= BIT(rcg->mux_sel_bit);
69	return !!bank;
70}
71
72static u8 clk_dyn_rcg_get_parent(struct clk_hw *hw)
73{
74	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
75	int num_parents = __clk_get_num_parents(hw->clk);
76	u32 ns, reg;
77	int bank;
78	int i, ret;
79	struct src_sel *s;
80
81	ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
82	if (ret)
83		goto err;
84	bank = reg_to_bank(rcg, reg);
85	s = &rcg->s[bank];
86
87	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
88	if (ret)
89		goto err;
90	ns = ns_to_src(s, ns);
91
92	for (i = 0; i < num_parents; i++)
93		if (ns == s->parent_map[i].cfg)
94			return i;
95
96err:
97	pr_debug("%s: Clock %s has invalid parent, using default.\n",
98		 __func__, __clk_get_name(hw->clk));
99	return 0;
100}
101
102static int clk_rcg_set_parent(struct clk_hw *hw, u8 index)
103{
104	struct clk_rcg *rcg = to_clk_rcg(hw);
105	u32 ns;
106
107	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
108	ns = src_to_ns(&rcg->s, rcg->s.parent_map[index].cfg, ns);
109	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
110
111	return 0;
112}
113
114static u32 md_to_m(struct mn *mn, u32 md)
115{
116	md >>= mn->m_val_shift;
117	md &= BIT(mn->width) - 1;
118	return md;
119}
120
121static u32 ns_to_pre_div(struct pre_div *p, u32 ns)
122{
123	ns >>= p->pre_div_shift;
124	ns &= BIT(p->pre_div_width) - 1;
125	return ns;
126}
127
128static u32 pre_div_to_ns(struct pre_div *p, u8 pre_div, u32 ns)
129{
130	u32 mask;
131
132	mask = BIT(p->pre_div_width) - 1;
133	mask <<= p->pre_div_shift;
134	ns &= ~mask;
135
136	ns |= pre_div << p->pre_div_shift;
137	return ns;
138}
139
140static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)
141{
142	u32 mask, mask_w;
143
144	mask_w = BIT(mn->width) - 1;
145	mask = (mask_w << mn->m_val_shift) | mask_w;
146	md &= ~mask;
147
148	if (n) {
149		m <<= mn->m_val_shift;
150		md |= m;
151		md |= ~n & mask_w;
152	}
153
154	return md;
155}
156
157static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)
158{
159	ns = ~ns >> mn->n_val_shift;
160	ns &= BIT(mn->width) - 1;
161	return ns + m;
162}
163
164static u32 reg_to_mnctr_mode(struct mn *mn, u32 val)
165{
166	val >>= mn->mnctr_mode_shift;
167	val &= MNCTR_MODE_MASK;
168	return val;
169}
170
171static u32 mn_to_ns(struct mn *mn, u32 m, u32 n, u32 ns)
172{
173	u32 mask;
174
175	mask = BIT(mn->width) - 1;
176	mask <<= mn->n_val_shift;
177	ns &= ~mask;
178
179	if (n) {
180		n = n - m;
181		n = ~n;
182		n &= BIT(mn->width) - 1;
183		n <<= mn->n_val_shift;
184		ns |= n;
185	}
186
187	return ns;
188}
189
190static u32 mn_to_reg(struct mn *mn, u32 m, u32 n, u32 val)
191{
192	u32 mask;
193
194	mask = MNCTR_MODE_MASK << mn->mnctr_mode_shift;
195	mask |= BIT(mn->mnctr_en_bit);
196	val &= ~mask;
197
198	if (n) {
199		val |= BIT(mn->mnctr_en_bit);
200		val |= MNCTR_MODE_DUAL << mn->mnctr_mode_shift;
201	}
202
203	return val;
204}
205
/*
 * Program one register bank of a banked (dynamic) RCG with the settings
 * from frequency table entry @f, then switch the mux over to it.
 *
 * If the clock is running, the bank NOT currently in use is programmed
 * and the mux is flipped afterwards so the change takes effect without
 * glitching the output.  If the clock is off, the active bank is
 * rewritten in place and no mux flip is needed.
 *
 * Returns 0 on success or a negative error code from regmap access or
 * the source-index lookup.
 */
static int configure_bank(struct clk_dyn_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, reg;
	int bank, new_bank, ret, index;
	struct mn *mn;
	struct pre_div *p;
	struct src_sel *s;
	bool enabled;
	u32 md_reg, ns_reg;
	/* A non-zero second entry means this RCG banks that resource */
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;
	struct clk_hw *hw = &rcg->clkr.hw;

	enabled = __clk_is_enabled(hw->clk);

	ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	if (ret)
		return ret;
	bank = reg_to_bank(rcg, reg);
	/* Only program the spare bank while the clock is running */
	new_bank = enabled ? !bank : bank;

	ns_reg = rcg->ns_reg[new_bank];
	ret = regmap_read(rcg->clkr.regmap, ns_reg, &ns);
	if (ret)
		return ret;

	if (banked_mn) {
		mn = &rcg->mn[new_bank];
		md_reg = rcg->md_reg[new_bank];

		/* Hold the M/N counter in reset while it is reprogrammed */
		ns |= BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		ret = regmap_read(rcg->clkr.regmap, md_reg, &md);
		if (ret)
			return ret;
		md = mn_to_md(mn, f->m, f->n, md);
		ret = regmap_write(rcg->clkr.regmap, md_reg, md);
		if (ret)
			return ret;
		ns = mn_to_ns(mn, f->m, f->n, ns);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1]) {
			ns = mn_to_reg(mn, f->m, f->n, ns);
			ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
			if (ret)
				return ret;
		} else {
			/* Otherwise the mode/enable bits live in bank_reg */
			reg = mn_to_reg(mn, f->m, f->n, reg);
			ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg,
					   reg);
			if (ret)
				return ret;
		}

		/* Take the M/N counter out of reset */
		ns &= ~BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;
	}

	if (banked_p) {
		p = &rcg->p[new_bank];
		/* Hardware field stores the divider value minus one */
		ns = pre_div_to_ns(p, f->pre_div - 1, ns);
	}

	s = &rcg->s[new_bank];
	/* f->src is a source id; map it to a parent index / cfg value */
	index = qcom_find_src_index(hw, s->parent_map, f->src);
	if (index < 0)
		return index;
	ns = src_to_ns(s, s->parent_map[index].cfg, ns);
	ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
	if (ret)
		return ret;

	if (enabled) {
		/* Flip the mux so the freshly-programmed bank takes over */
		ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
		if (ret)
			return ret;
		reg ^= BIT(rcg->mux_sel_bit);
		ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg, reg);
		if (ret)
			return ret;
	}
	return 0;
}
298
299static int clk_dyn_rcg_set_parent(struct clk_hw *hw, u8 index)
300{
301	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
302	u32 ns, md, reg;
303	int bank;
304	struct freq_tbl f = { 0 };
305	bool banked_mn = !!rcg->mn[1].width;
306	bool banked_p = !!rcg->p[1].pre_div_width;
307
308	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
309	bank = reg_to_bank(rcg, reg);
310
311	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
312
313	if (banked_mn) {
314		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
315		f.m = md_to_m(&rcg->mn[bank], md);
316		f.n = ns_m_to_n(&rcg->mn[bank], ns, f.m);
317	}
318
319	if (banked_p)
320		f.pre_div = ns_to_pre_div(&rcg->p[bank], ns) + 1;
321
322	f.src = qcom_find_src_index(hw, rcg->s[bank].parent_map, index);
323	return configure_bank(rcg, &f);
324}
325
326/*
327 * Calculate m/n:d rate
328 *
329 *          parent_rate     m
330 *   rate = ----------- x  ---
331 *            pre_div       n
332 */
333static unsigned long
334calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 pre_div)
335{
336	if (pre_div)
337		rate /= pre_div + 1;
338
339	if (mode) {
340		u64 tmp = rate;
341		tmp *= m;
342		do_div(tmp, n);
343		rate = tmp;
344	}
345
346	return rate;
347}
348
349static unsigned long
350clk_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
351{
352	struct clk_rcg *rcg = to_clk_rcg(hw);
353	u32 pre_div, m = 0, n = 0, ns, md, mode = 0;
354	struct mn *mn = &rcg->mn;
355
356	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
357	pre_div = ns_to_pre_div(&rcg->p, ns);
358
359	if (rcg->mn.width) {
360		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
361		m = md_to_m(mn, md);
362		n = ns_m_to_n(mn, ns, m);
363		/* MN counter mode is in hw.enable_reg sometimes */
364		if (rcg->clkr.enable_reg != rcg->ns_reg)
365			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &mode);
366		else
367			mode = ns;
368		mode = reg_to_mnctr_mode(mn, mode);
369	}
370
371	return calc_rate(parent_rate, m, n, mode, pre_div);
372}
373
374static unsigned long
375clk_dyn_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
376{
377	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
378	u32 m, n, pre_div, ns, md, mode, reg;
379	int bank;
380	struct mn *mn;
381	bool banked_p = !!rcg->p[1].pre_div_width;
382	bool banked_mn = !!rcg->mn[1].width;
383
384	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
385	bank = reg_to_bank(rcg, reg);
386
387	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
388	m = n = pre_div = mode = 0;
389
390	if (banked_mn) {
391		mn = &rcg->mn[bank];
392		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
393		m = md_to_m(mn, md);
394		n = ns_m_to_n(mn, ns, m);
395		/* Two NS registers means mode control is in NS register */
396		if (rcg->ns_reg[0] != rcg->ns_reg[1])
397			reg = ns;
398		mode = reg_to_mnctr_mode(mn, reg);
399	}
400
401	if (banked_p)
402		pre_div = ns_to_pre_div(&rcg->p[bank], ns);
403
404	return calc_rate(parent_rate, m, n, mode, pre_div);
405}
406
/*
 * Find the frequency table entry for @rate and report which parent
 * (*p_hw) and parent rate (*p_rate) should be used to achieve it.
 *
 * When the clock propagates rate requests upward (CLK_SET_RATE_PARENT),
 * the required parent rate is derived by inverting the RCG math:
 * parent_rate = rate * pre_div * n / m.  Otherwise the parent's current
 * rate is reported unchanged.
 *
 * Returns the table frequency on success, -EINVAL if no entry matches
 * @rate, or a negative error from the source-index lookup.
 */
static long _freq_tbl_determine_rate(struct clk_hw *hw,
		const struct freq_tbl *f, unsigned long rate,
		unsigned long min_rate, unsigned long max_rate,
		unsigned long *p_rate, struct clk_hw **p_hw,
		const struct parent_map *parent_map)
{
	unsigned long clk_flags;
	struct clk *p;
	int index;

	f = qcom_find_freq(f, rate);
	if (!f)
		return -EINVAL;

	/* Map the entry's source id to a parent index */
	index = qcom_find_src_index(hw, parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = __clk_get_flags(hw->clk);
	p = clk_get_parent_by_index(hw->clk, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		rate = rate * f->pre_div;
		if (f->n) {
			/* 64-bit intermediate to avoid overflow */
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate =  __clk_get_rate(p);
	}
	*p_hw = __clk_get_hw(p);
	*p_rate = rate;

	return f->freq;
}
443
444static long clk_rcg_determine_rate(struct clk_hw *hw, unsigned long rate,
445		unsigned long min_rate, unsigned long max_rate,
446		unsigned long *p_rate, struct clk_hw **p)
447{
448	struct clk_rcg *rcg = to_clk_rcg(hw);
449
450	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, rate, min_rate,
451			max_rate, p_rate, p, rcg->s.parent_map);
452}
453
454static long clk_dyn_rcg_determine_rate(struct clk_hw *hw, unsigned long rate,
455		unsigned long min_rate, unsigned long max_rate,
456		unsigned long *p_rate, struct clk_hw **p)
457{
458	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
459	u32 reg;
460	int bank;
461	struct src_sel *s;
462
463	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
464	bank = reg_to_bank(rcg, reg);
465	s = &rcg->s[bank];
466
467	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, rate, min_rate,
468			max_rate, p_rate, p, s->parent_map);
469}
470
471static long clk_rcg_bypass_determine_rate(struct clk_hw *hw, unsigned long rate,
472		unsigned long min_rate, unsigned long max_rate,
473		unsigned long *p_rate, struct clk_hw **p_hw)
474{
475	struct clk_rcg *rcg = to_clk_rcg(hw);
476	const struct freq_tbl *f = rcg->freq_tbl;
477	struct clk *p;
478	int index = qcom_find_src_index(hw, rcg->s.parent_map, f->src);
479
480	p = clk_get_parent_by_index(hw->clk, index);
481	*p_hw = __clk_get_hw(p);
482	*p_rate = __clk_round_rate(p, rate);
483
484	return *p_rate;
485}
486
/*
 * Program a non-banked RCG's NS/MD registers from frequency table entry
 * @f.  When an M/N counter is present it is held in reset for the whole
 * update (assert before, deassert after) so a half-programmed counter is
 * never clocking.  Returns 0.
 */
static int __clk_rcg_set_rate(struct clk_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, ctl;
	struct mn *mn = &rcg->mn;
	u32 mask = 0;
	unsigned int reset_reg;

	/* Some RCGs keep the M/N reset bit in the enable register instead
	 * of NS */
	if (rcg->mn.reset_in_cc)
		reset_reg = rcg->clkr.enable_reg;
	else
		reset_reg = rcg->ns_reg;

	if (rcg->mn.width) {
		/* Assert the M/N counter reset while reprogramming */
		mask = BIT(mn->mnctr_reset_bit);
		regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, mask);

		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		md = mn_to_md(mn, f->m, f->n, md);
		regmap_write(rcg->clkr.regmap, rcg->md_reg, md);

		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg) {
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
			ctl = mn_to_reg(mn, f->m, f->n, ctl);
			regmap_write(rcg->clkr.regmap, rcg->clkr.enable_reg, ctl);
		} else {
			ns = mn_to_reg(mn, f->m, f->n, ns);
		}
		ns = mn_to_ns(mn, f->m, f->n, ns);
	} else {
		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	}

	/* Hardware field stores the divider value minus one */
	ns = pre_div_to_ns(&rcg->p, f->pre_div - 1, ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	/* Release the reset; mask is 0 (no-op) when there is no counter */
	regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, 0);

	return 0;
}
528
529static int clk_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
530			    unsigned long parent_rate)
531{
532	struct clk_rcg *rcg = to_clk_rcg(hw);
533	const struct freq_tbl *f;
534
535	f = qcom_find_freq(rcg->freq_tbl, rate);
536	if (!f)
537		return -EINVAL;
538
539	return __clk_rcg_set_rate(rcg, f);
540}
541
542static int clk_rcg_bypass_set_rate(struct clk_hw *hw, unsigned long rate,
543				unsigned long parent_rate)
544{
545	struct clk_rcg *rcg = to_clk_rcg(hw);
546
547	return __clk_rcg_set_rate(rcg, rcg->freq_tbl);
548}
549
550/*
551 * This type of clock has a glitch-free mux that switches between the output of
552 * the M/N counter and an always on clock source (XO). When clk_set_rate() is
553 * called we need to make sure that we don't switch to the M/N counter if it
554 * isn't clocking because the mux will get stuck and the clock will stop
555 * outputting a clock. This can happen if the framework isn't aware that this
556 * clock is on and so clk_set_rate() doesn't turn on the new parent. To fix
557 * this we switch the mux in the enable/disable ops and reprogram the M/N
558 * counter in the set_rate op. We also make sure to switch away from the M/N
559 * counter in set_rate if software thinks the clock is off.
560 */
/*
 * set_rate for an LCC RCG with a glitch-free mux (see comment above):
 * park the mux on XO while the M/N counter is reprogrammed, then switch
 * back to the M/N output only if the clock is known to be running.
 */
static int clk_rcg_lcc_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;
	int ret;
	u32 gfm = BIT(10);	/* glitch-free mux select: 1 = M/N, 0 = XO */

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	/* Switch to XO to avoid glitches */
	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
	ret = __clk_rcg_set_rate(rcg, f);
	/* Switch back to M/N if it's clocking */
	if (__clk_is_enabled(hw->clk))
		regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);

	return ret;
}
582
583static int clk_rcg_lcc_enable(struct clk_hw *hw)
584{
585	struct clk_rcg *rcg = to_clk_rcg(hw);
586	u32 gfm = BIT(10);
587
588	/* Use M/N */
589	return regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);
590}
591
592static void clk_rcg_lcc_disable(struct clk_hw *hw)
593{
594	struct clk_rcg *rcg = to_clk_rcg(hw);
595	u32 gfm = BIT(10);
596
597	/* Use XO */
598	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
599}
600
601static int __clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate)
602{
603	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
604	const struct freq_tbl *f;
605
606	f = qcom_find_freq(rcg->freq_tbl, rate);
607	if (!f)
608		return -EINVAL;
609
610	return configure_bank(rcg, f);
611}
612
/* set_rate callback: the parent rate is implied by the table entry. */
static int clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}
618
619static int clk_dyn_rcg_set_rate_and_parent(struct clk_hw *hw,
620		unsigned long rate, unsigned long parent_rate, u8 index)
621{
622	return __clk_dyn_rcg_set_rate(hw, rate);
623}
624
/* Ops for a plain RCG: regmap-bit gating, table-driven rate setting. */
const struct clk_ops clk_rcg_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_ops);
635
/* Ops for a bypass RCG: rate follows the single table entry's parent. */
const struct clk_ops clk_rcg_bypass_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass_determine_rate,
	.set_rate = clk_rcg_bypass_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass_ops);
646
/*
 * Ops for an LCC RCG: enable/disable drive the glitch-free mux between
 * the M/N counter output and XO instead of a regmap gate bit.
 */
const struct clk_ops clk_rcg_lcc_ops = {
	.enable = clk_rcg_lcc_enable,
	.disable = clk_rcg_lcc_disable,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_lcc_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_lcc_ops);
657
/* Ops for a banked (dynamic) RCG: glitch-free bank switching. */
const struct clk_ops clk_dyn_rcg_ops = {
	.enable = clk_enable_regmap,
	.is_enabled = clk_is_enabled_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_dyn_rcg_get_parent,
	.set_parent = clk_dyn_rcg_set_parent,
	.recalc_rate = clk_dyn_rcg_recalc_rate,
	.determine_rate = clk_dyn_rcg_determine_rate,
	.set_rate = clk_dyn_rcg_set_rate,
	.set_rate_and_parent = clk_dyn_rcg_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_dyn_rcg_ops);
670