v4.17
  1/*
  2 *  Atheros AR71XX/AR724X/AR913X common routines
  3 *
  4 *  Copyright (C) 2010-2011 Jaiganesh Narayanan <jnarayanan@atheros.com>
  5 *  Copyright (C) 2011 Gabor Juhos <juhosg@openwrt.org>
  6 *
  7 *  Parts of this file are based on Atheros' 2.6.15/2.6.31 BSP
  8 *
  9 *  This program is free software; you can redistribute it and/or modify it
 10 *  under the terms of the GNU General Public License version 2 as published
 11 *  by the Free Software Foundation.
 12 */
 13
 14#include <linux/kernel.h>
 15#include <linux/init.h>
 16#include <linux/err.h>
 17#include <linux/clk.h>
 18#include <linux/clkdev.h>
 19#include <linux/clk-provider.h>
 20#include <linux/of.h>
 21#include <linux/of_address.h>
 22#include <dt-bindings/clock/ath79-clk.h>
 23
 24#include <asm/div64.h>
 25
 26#include <asm/mach-ath79/ath79.h>
 27#include <asm/mach-ath79/ar71xx_regs.h>
 28#include "common.h"
 29#include "machtypes.h"
 30
 31#define AR71XX_BASE_FREQ	40000000
 32#define AR724X_BASE_FREQ	40000000
 33
 34static struct clk *clks[ATH79_CLK_END];
 35static struct clk_onecell_data clk_data = {
 36	.clks = clks,
 37	.clk_num = ARRAY_SIZE(clks),
 38};
 39
 40static struct clk *__init ath79_add_sys_clkdev(
 41	const char *id, unsigned long rate)
 42{
 43	struct clk *clk;
 44	int err;
 45
 46	clk = clk_register_fixed_rate(NULL, id, NULL, 0, rate);
 47	if (IS_ERR(clk))
 48		panic("failed to allocate %s clock structure", id);
 49
 50	err = clk_register_clkdev(clk, id, NULL);
 51	if (err)
 52		panic("unable to register %s clock device", id);
 53
 54	return clk;
 55}
 56
 57static void __init ar71xx_clocks_init(void)
 58{
 59	unsigned long ref_rate;
 60	unsigned long cpu_rate;
 61	unsigned long ddr_rate;
 62	unsigned long ahb_rate;
 63	u32 pll;
 64	u32 freq;
 65	u32 div;
 66
 67	ref_rate = AR71XX_BASE_FREQ;
 68
 69	pll = ath79_pll_rr(AR71XX_PLL_REG_CPU_CONFIG);
 70
 71	div = ((pll >> AR71XX_PLL_FB_SHIFT) & AR71XX_PLL_FB_MASK) + 1;
 72	freq = div * ref_rate;
 73
 74	div = ((pll >> AR71XX_CPU_DIV_SHIFT) & AR71XX_CPU_DIV_MASK) + 1;
 75	cpu_rate = freq / div;
 76
 77	div = ((pll >> AR71XX_DDR_DIV_SHIFT) & AR71XX_DDR_DIV_MASK) + 1;
 78	ddr_rate = freq / div;
 79
 80	div = (((pll >> AR71XX_AHB_DIV_SHIFT) & AR71XX_AHB_DIV_MASK) + 1) * 2;
 81	ahb_rate = cpu_rate / div;
 82
 83	ath79_add_sys_clkdev("ref", ref_rate);
 84	clks[ATH79_CLK_CPU] = ath79_add_sys_clkdev("cpu", cpu_rate);
 85	clks[ATH79_CLK_DDR] = ath79_add_sys_clkdev("ddr", ddr_rate);
 86	clks[ATH79_CLK_AHB] = ath79_add_sys_clkdev("ahb", ahb_rate);
 87
 88	clk_add_alias("wdt", NULL, "ahb", NULL);
 89	clk_add_alias("uart", NULL, "ahb", NULL);
 90}
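/*
 * Worked example for the divider math above (illustrative only, the register
 * field values are hypothetical): with the fixed 40 MHz reference and a
 * CPU_CONFIG word whose FB field is 16, CPU_DIV 0, DDR_DIV 1 and AHB_DIV 1,
 * the PLL runs at (16 + 1) * 40 MHz = 680 MHz, giving
 * cpu = 680 / (0 + 1) = 680 MHz, ddr = 680 / (1 + 1) = 340 MHz and
 * ahb = 680 / ((1 + 1) * 2) = 170 MHz.
 */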
 91
 92static struct clk * __init ath79_reg_ffclk(const char *name,
 93		const char *parent_name, unsigned int mult, unsigned int div)
 94{
 95	struct clk *clk;
 96
 97	clk = clk_register_fixed_factor(NULL, name, parent_name, 0, mult, div);
 98	if (IS_ERR(clk))
 99		panic("failed to allocate %s clock structure", name);
100
101	return clk;
102}
103
104static void __init ar724x_clk_init(struct clk *ref_clk, void __iomem *pll_base)
105{
106	u32 pll;
107	u32 mult, div, ddr_div, ahb_div;
108
109	pll = __raw_readl(pll_base + AR724X_PLL_REG_CPU_CONFIG);
110
111	mult = ((pll >> AR724X_PLL_FB_SHIFT) & AR724X_PLL_FB_MASK);
112	div = ((pll >> AR724X_PLL_REF_DIV_SHIFT) & AR724X_PLL_REF_DIV_MASK) * 2;
113
114	ddr_div = ((pll >> AR724X_DDR_DIV_SHIFT) & AR724X_DDR_DIV_MASK) + 1;
115	ahb_div = (((pll >> AR724X_AHB_DIV_SHIFT) & AR724X_AHB_DIV_MASK) + 1) * 2;
116
117	clks[ATH79_CLK_CPU] = ath79_reg_ffclk("cpu", "ref", mult, div);
118	clks[ATH79_CLK_DDR] = ath79_reg_ffclk("ddr", "ref", mult, div * ddr_div);
119	clks[ATH79_CLK_AHB] = ath79_reg_ffclk("ahb", "ref", mult, div * ahb_div);
120}
121
122static void __init ar724x_clocks_init(void)
123{
124	struct clk *ref_clk;
125
126	ref_clk = ath79_add_sys_clkdev("ref", AR724X_BASE_FREQ);
127
128	ar724x_clk_init(ref_clk, ath79_pll_base);
129
 130	/* just to keep plat_time_init() in arch/mips/ath79/setup.c happy */
131	clk_register_clkdev(clks[ATH79_CLK_CPU], "cpu", NULL);
132	clk_register_clkdev(clks[ATH79_CLK_DDR], "ddr", NULL);
133	clk_register_clkdev(clks[ATH79_CLK_AHB], "ahb", NULL);
134
135	clk_add_alias("wdt", NULL, "ahb", NULL);
136	clk_add_alias("uart", NULL, "ahb", NULL);
137}
138
139static void __init ar9330_clk_init(struct clk *ref_clk, void __iomem *pll_base)
140{
141	u32 clock_ctrl;
142	u32 ref_div;
143	u32 ninit_mul;
144	u32 out_div;
145
146	u32 cpu_div;
147	u32 ddr_div;
148	u32 ahb_div;
149
150	clock_ctrl = __raw_readl(pll_base + AR933X_PLL_CLOCK_CTRL_REG);
151	if (clock_ctrl & AR933X_PLL_CLOCK_CTRL_BYPASS) {
152		ref_div = 1;
153		ninit_mul = 1;
154		out_div = 1;
155
156		cpu_div = 1;
157		ddr_div = 1;
158		ahb_div = 1;
159	} else {
160		u32 cpu_config;
161		u32 t;
162
163		cpu_config = __raw_readl(pll_base + AR933X_PLL_CPU_CONFIG_REG);
164
165		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
166		    AR933X_PLL_CPU_CONFIG_REFDIV_MASK;
167		ref_div = t;
168
169		ninit_mul = (cpu_config >> AR933X_PLL_CPU_CONFIG_NINT_SHIFT) &
170		    AR933X_PLL_CPU_CONFIG_NINT_MASK;
171
172		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
173		    AR933X_PLL_CPU_CONFIG_OUTDIV_MASK;
174		if (t == 0)
175			t = 1;
176
177		out_div = (1 << t);
178
179		cpu_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_CPU_DIV_SHIFT) &
180		     AR933X_PLL_CLOCK_CTRL_CPU_DIV_MASK) + 1;
181
182		ddr_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_DDR_DIV_SHIFT) &
183		      AR933X_PLL_CLOCK_CTRL_DDR_DIV_MASK) + 1;
184
185		ahb_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_AHB_DIV_SHIFT) &
186		     AR933X_PLL_CLOCK_CTRL_AHB_DIV_MASK) + 1;
187	}
188
189	clks[ATH79_CLK_CPU] = ath79_reg_ffclk("cpu", "ref",
190					ninit_mul, ref_div * out_div * cpu_div);
191	clks[ATH79_CLK_DDR] = ath79_reg_ffclk("ddr", "ref",
192					ninit_mul, ref_div * out_div * ddr_div);
193	clks[ATH79_CLK_AHB] = ath79_reg_ffclk("ahb", "ref",
194					ninit_mul, ref_div * out_div * ahb_div);
195}
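/*
 * Worked example for the fixed-factor clocks above (illustrative only, the
 * field values are hypothetical): with a 25 MHz reference, NINT = 32,
 * REFDIV = 1, an OUTDIV field of 0 (clamped to 1, so out_div = 2) and
 * CPU/DDR/AHB divider fields of 0, 0 and 1, the rates are
 * cpu = 25 MHz * 32 / (1 * 2 * 1) = 400 MHz,
 * ddr = 25 MHz * 32 / (1 * 2 * 1) = 400 MHz and
 * ahb = 25 MHz * 32 / (1 * 2 * 2) = 200 MHz.
 */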
196
197static void __init ar933x_clocks_init(void)
198{
199	struct clk *ref_clk;
200	unsigned long ref_rate;
201	u32 t;
202
203	t = ath79_reset_rr(AR933X_RESET_REG_BOOTSTRAP);
204	if (t & AR933X_BOOTSTRAP_REF_CLK_40)
205		ref_rate = (40 * 1000 * 1000);
206	else
207		ref_rate = (25 * 1000 * 1000);
208
209	ref_clk = ath79_add_sys_clkdev("ref", ref_rate);
210
211	ar9330_clk_init(ref_clk, ath79_pll_base);
212
 213	/* just to keep plat_time_init() in arch/mips/ath79/setup.c happy */
214	clk_register_clkdev(clks[ATH79_CLK_CPU], "cpu", NULL);
215	clk_register_clkdev(clks[ATH79_CLK_DDR], "ddr", NULL);
216	clk_register_clkdev(clks[ATH79_CLK_AHB], "ahb", NULL);
217
218	clk_add_alias("wdt", NULL, "ahb", NULL);
219	clk_add_alias("uart", NULL, "ref", NULL);
220}
221
222static u32 __init ar934x_get_pll_freq(u32 ref, u32 ref_div, u32 nint, u32 nfrac,
223				      u32 frac, u32 out_div)
224{
225	u64 t;
226	u32 ret;
227
228	t = ref;
229	t *= nint;
230	do_div(t, ref_div);
231	ret = t;
232
233	t = ref;
234	t *= nfrac;
235	do_div(t, ref_div * frac);
236	ret += t;
237
238	ret /= (1 << out_div);
239	return ret;
240}
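/*
 * The helper above evaluates
 *
 *	pll = ref * nint / ref_div + ref * nfrac / (ref_div * frac)
 *
 * and then divides the result by 2^out_div.  The intermediate products are
 * computed in a u64 with do_div() because ref * nfrac can exceed 32 bits
 * (nfrac is up to 18 bits wide in the SRIF DPLL case, see frac = 1 << 18
 * below).  Example with hypothetical values: ref = 40 MHz, ref_div = 1,
 * nint = 14, nfrac = 0, out_div = 0 yields a 560 MHz PLL.
 */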
241
242static void __init ar934x_clocks_init(void)
243{
244	unsigned long ref_rate;
245	unsigned long cpu_rate;
246	unsigned long ddr_rate;
247	unsigned long ahb_rate;
248	u32 pll, out_div, ref_div, nint, nfrac, frac, clk_ctrl, postdiv;
249	u32 cpu_pll, ddr_pll;
250	u32 bootstrap;
251	void __iomem *dpll_base;
252
253	dpll_base = ioremap(AR934X_SRIF_BASE, AR934X_SRIF_SIZE);
254
255	bootstrap = ath79_reset_rr(AR934X_RESET_REG_BOOTSTRAP);
256	if (bootstrap & AR934X_BOOTSTRAP_REF_CLK_40)
257		ref_rate = 40 * 1000 * 1000;
258	else
259		ref_rate = 25 * 1000 * 1000;
260
261	pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL2_REG);
262	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
263		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
264			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
265		pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL1_REG);
266		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
267		       AR934X_SRIF_DPLL1_NINT_MASK;
268		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
269		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
270			  AR934X_SRIF_DPLL1_REFDIV_MASK;
271		frac = 1 << 18;
272	} else {
273		pll = ath79_pll_rr(AR934X_PLL_CPU_CONFIG_REG);
274		out_div = (pll >> AR934X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
275			AR934X_PLL_CPU_CONFIG_OUTDIV_MASK;
276		ref_div = (pll >> AR934X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
277			  AR934X_PLL_CPU_CONFIG_REFDIV_MASK;
278		nint = (pll >> AR934X_PLL_CPU_CONFIG_NINT_SHIFT) &
279		       AR934X_PLL_CPU_CONFIG_NINT_MASK;
280		nfrac = (pll >> AR934X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
281			AR934X_PLL_CPU_CONFIG_NFRAC_MASK;
282		frac = 1 << 6;
283	}
284
285	cpu_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
286				      nfrac, frac, out_div);
287
288	pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL2_REG);
289	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
290		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
291			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
292		pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL1_REG);
293		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
294		       AR934X_SRIF_DPLL1_NINT_MASK;
295		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
296		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
297			  AR934X_SRIF_DPLL1_REFDIV_MASK;
298		frac = 1 << 18;
299	} else {
300		pll = ath79_pll_rr(AR934X_PLL_DDR_CONFIG_REG);
301		out_div = (pll >> AR934X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
302			  AR934X_PLL_DDR_CONFIG_OUTDIV_MASK;
303		ref_div = (pll >> AR934X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
304			   AR934X_PLL_DDR_CONFIG_REFDIV_MASK;
305		nint = (pll >> AR934X_PLL_DDR_CONFIG_NINT_SHIFT) &
306		       AR934X_PLL_DDR_CONFIG_NINT_MASK;
307		nfrac = (pll >> AR934X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
308			AR934X_PLL_DDR_CONFIG_NFRAC_MASK;
309		frac = 1 << 10;
310	}
311
312	ddr_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
313				      nfrac, frac, out_div);
314
315	clk_ctrl = ath79_pll_rr(AR934X_PLL_CPU_DDR_CLK_CTRL_REG);
316
317	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_SHIFT) &
318		  AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_MASK;
319
320	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_PLL_BYPASS)
321		cpu_rate = ref_rate;
322	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPUCLK_FROM_CPUPLL)
323		cpu_rate = cpu_pll / (postdiv + 1);
324	else
325		cpu_rate = ddr_pll / (postdiv + 1);
326
327	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_SHIFT) &
328		  AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_MASK;
329
330	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_PLL_BYPASS)
331		ddr_rate = ref_rate;
332	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDRCLK_FROM_DDRPLL)
333		ddr_rate = ddr_pll / (postdiv + 1);
334	else
335		ddr_rate = cpu_pll / (postdiv + 1);
336
337	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_SHIFT) &
338		  AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_MASK;
339
340	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_PLL_BYPASS)
341		ahb_rate = ref_rate;
342	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHBCLK_FROM_DDRPLL)
343		ahb_rate = ddr_pll / (postdiv + 1);
344	else
345		ahb_rate = cpu_pll / (postdiv + 1);
346
347	ath79_add_sys_clkdev("ref", ref_rate);
348	clks[ATH79_CLK_CPU] = ath79_add_sys_clkdev("cpu", cpu_rate);
349	clks[ATH79_CLK_DDR] = ath79_add_sys_clkdev("ddr", ddr_rate);
350	clks[ATH79_CLK_AHB] = ath79_add_sys_clkdev("ahb", ahb_rate);
351
352	clk_add_alias("wdt", NULL, "ref", NULL);
353	clk_add_alias("uart", NULL, "ref", NULL);
354
355	iounmap(dpll_base);
356}
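/*
 * Illustrative numbers for the selection logic above (the register contents
 * are hypothetical): with a 40 MHz reference, a 560 MHz CPU PLL and a
 * 900 MHz DDR PLL, the CPU clock taken from the CPU PLL with post-divider
 * field 0, the DDR clock from the DDR PLL with post-divider field 1 and the
 * AHB clock from the DDR PLL with post-divider field 3, the derived rates
 * are cpu = 560 MHz, ddr = 900 / 2 = 450 MHz and ahb = 900 / 4 = 225 MHz.
 */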
357
358static void __init qca955x_clocks_init(void)
359{
360	unsigned long ref_rate;
361	unsigned long cpu_rate;
362	unsigned long ddr_rate;
363	unsigned long ahb_rate;
364	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
365	u32 cpu_pll, ddr_pll;
366	u32 bootstrap;
367
368	bootstrap = ath79_reset_rr(QCA955X_RESET_REG_BOOTSTRAP);
369	if (bootstrap &	QCA955X_BOOTSTRAP_REF_CLK_40)
370		ref_rate = 40 * 1000 * 1000;
371	else
372		ref_rate = 25 * 1000 * 1000;
373
374	pll = ath79_pll_rr(QCA955X_PLL_CPU_CONFIG_REG);
375	out_div = (pll >> QCA955X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
376		  QCA955X_PLL_CPU_CONFIG_OUTDIV_MASK;
377	ref_div = (pll >> QCA955X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
378		  QCA955X_PLL_CPU_CONFIG_REFDIV_MASK;
379	nint = (pll >> QCA955X_PLL_CPU_CONFIG_NINT_SHIFT) &
380	       QCA955X_PLL_CPU_CONFIG_NINT_MASK;
381	frac = (pll >> QCA955X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
382	       QCA955X_PLL_CPU_CONFIG_NFRAC_MASK;
383
384	cpu_pll = nint * ref_rate / ref_div;
385	cpu_pll += frac * ref_rate / (ref_div * (1 << 6));
386	cpu_pll /= (1 << out_div);
387
388	pll = ath79_pll_rr(QCA955X_PLL_DDR_CONFIG_REG);
389	out_div = (pll >> QCA955X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
390		  QCA955X_PLL_DDR_CONFIG_OUTDIV_MASK;
391	ref_div = (pll >> QCA955X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
392		  QCA955X_PLL_DDR_CONFIG_REFDIV_MASK;
393	nint = (pll >> QCA955X_PLL_DDR_CONFIG_NINT_SHIFT) &
394	       QCA955X_PLL_DDR_CONFIG_NINT_MASK;
395	frac = (pll >> QCA955X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
396	       QCA955X_PLL_DDR_CONFIG_NFRAC_MASK;
397
398	ddr_pll = nint * ref_rate / ref_div;
399	ddr_pll += frac * ref_rate / (ref_div * (1 << 10));
400	ddr_pll /= (1 << out_div);
401
402	clk_ctrl = ath79_pll_rr(QCA955X_PLL_CLK_CTRL_REG);
403
404	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
405		  QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;
406
407	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
408		cpu_rate = ref_rate;
409	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
410		cpu_rate = ddr_pll / (postdiv + 1);
411	else
412		cpu_rate = cpu_pll / (postdiv + 1);
413
414	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
415		  QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;
416
417	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
418		ddr_rate = ref_rate;
419	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
420		ddr_rate = cpu_pll / (postdiv + 1);
421	else
422		ddr_rate = ddr_pll / (postdiv + 1);
423
424	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
425		  QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;
426
427	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
428		ahb_rate = ref_rate;
429	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
430		ahb_rate = ddr_pll / (postdiv + 1);
431	else
432		ahb_rate = cpu_pll / (postdiv + 1);
433
434	ath79_add_sys_clkdev("ref", ref_rate);
435	clks[ATH79_CLK_CPU] = ath79_add_sys_clkdev("cpu", cpu_rate);
436	clks[ATH79_CLK_DDR] = ath79_add_sys_clkdev("ddr", ddr_rate);
437	clks[ATH79_CLK_AHB] = ath79_add_sys_clkdev("ahb", ahb_rate);
438
439	clk_add_alias("wdt", NULL, "ref", NULL);
440	clk_add_alias("uart", NULL, "ref", NULL);
441}
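/*
 * Worked example for the PLL math above (hypothetical field values): with a
 * 40 MHz reference, NINT = 18, REFDIV = 1, NFRAC = 0 and OUTDIV = 0, the CPU
 * PLL is 40 MHz * 18 = 720 MHz; an NINT of 15 gives a 600 MHz DDR PLL.  If
 * the AHB clock is taken from the DDR PLL with a post-divider field of 2,
 * ahb = 600 / (2 + 1) = 200 MHz.
 */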
442
443void __init ath79_clocks_init(void)
444{
445	if (soc_is_ar71xx())
446		ar71xx_clocks_init();
447	else if (soc_is_ar724x() || soc_is_ar913x())
448		ar724x_clocks_init();
449	else if (soc_is_ar933x())
450		ar933x_clocks_init();
451	else if (soc_is_ar934x())
452		ar934x_clocks_init();
453	else if (soc_is_qca955x())
454		qca955x_clocks_init();
455	else
456		BUG();
457}
458
459unsigned long __init
460ath79_get_sys_clk_rate(const char *id)
461{
462	struct clk *clk;
463	unsigned long rate;
464
465	clk = clk_get(NULL, id);
466	if (IS_ERR(clk))
467		panic("unable to get %s clock, err=%d", id, (int) PTR_ERR(clk));
468
469	rate = clk_get_rate(clk);
470	clk_put(clk);
471
472	return rate;
473}
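/*
 * Minimal usage sketch for the helper above (illustrative; the function name
 * is an assumption, not part of this file): early platform code can look up
 * the rates registered here by their "cpu"/"ddr"/"ahb" names.
 */
#if 0	/* example only */
static void __init example_report_clocks(void)
{
	unsigned long cpu = ath79_get_sys_clk_rate("cpu");
	unsigned long ddr = ath79_get_sys_clk_rate("ddr");
	unsigned long ahb = ath79_get_sys_clk_rate("ahb");

	pr_info("Clocks: CPU:%lu MHz, DDR:%lu MHz, AHB:%lu MHz\n",
		cpu / 1000000, ddr / 1000000, ahb / 1000000);
}
#endif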
474
475#ifdef CONFIG_OF
476static void __init ath79_clocks_init_dt(struct device_node *np)
477{
478	of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data);
479}
480
481CLK_OF_DECLARE(ar7100, "qca,ar7100-pll", ath79_clocks_init_dt);
482CLK_OF_DECLARE(ar7240, "qca,ar7240-pll", ath79_clocks_init_dt);
483CLK_OF_DECLARE(ar9340, "qca,ar9340-pll", ath79_clocks_init_dt);
484CLK_OF_DECLARE(ar9550, "qca,qca9550-pll", ath79_clocks_init_dt);
485
486static void __init ath79_clocks_init_dt_ng(struct device_node *np)
487{
488	struct clk *ref_clk;
489	void __iomem *pll_base;
490
491	ref_clk = of_clk_get(np, 0);
492	if (IS_ERR(ref_clk)) {
493		pr_err("%pOF: of_clk_get failed\n", np);
494		goto err;
495	}
496
497	pll_base = of_iomap(np, 0);
498	if (!pll_base) {
499		pr_err("%pOF: can't map pll registers\n", np);
500		goto err_clk;
501	}
502
503	if (of_device_is_compatible(np, "qca,ar9130-pll"))
504		ar724x_clk_init(ref_clk, pll_base);
505	else if (of_device_is_compatible(np, "qca,ar9330-pll"))
506		ar9330_clk_init(ref_clk, pll_base);
507	else {
508		pr_err("%pOF: could not find any appropriate clk_init()\n", np);
509		goto err_iounmap;
510	}
511
512	if (of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data)) {
513		pr_err("%pOF: could not register clk provider\n", np);
514		goto err_iounmap;
515	}
516
517	return;
518
519err_iounmap:
520	iounmap(pll_base);
521
522err_clk:
523	clk_put(ref_clk);
524
525err:
526	return;
527}
528CLK_OF_DECLARE(ar9130_clk, "qca,ar9130-pll", ath79_clocks_init_dt_ng);
529CLK_OF_DECLARE(ar9330_clk, "qca,ar9330-pll", ath79_clocks_init_dt_ng);
530#endif
v6.2
  1// SPDX-License-Identifier: GPL-2.0-only
  2/*
  3 *  Atheros AR71XX/AR724X/AR913X common routines
  4 *
  5 *  Copyright (C) 2010-2011 Jaiganesh Narayanan <jnarayanan@atheros.com>
  6 *  Copyright (C) 2011 Gabor Juhos <juhosg@openwrt.org>
  7 *
  8 *  Parts of this file are based on Atheros' 2.6.15/2.6.31 BSP
  9 */
 10
 11#include <linux/kernel.h>
 12#include <linux/init.h>
 13#include <linux/io.h>
 14#include <linux/err.h>
 15#include <linux/clk.h>
 16#include <linux/clkdev.h>
 17#include <linux/clk-provider.h>
 18#include <linux/of.h>
 19#include <linux/of_address.h>
 20#include <dt-bindings/clock/ath79-clk.h>
 21
 22#include <asm/div64.h>
 23
 24#include <asm/mach-ath79/ath79.h>
 25#include <asm/mach-ath79/ar71xx_regs.h>
 26#include "common.h"
 27
 28#define AR71XX_BASE_FREQ	40000000
 29#define AR724X_BASE_FREQ	40000000
 30
 31static struct clk *clks[ATH79_CLK_END];
 32static struct clk_onecell_data clk_data = {
 33	.clks = clks,
 34	.clk_num = ARRAY_SIZE(clks),
 35};
 36
 37static const char * const clk_names[ATH79_CLK_END] = {
 38	[ATH79_CLK_CPU] = "cpu",
 39	[ATH79_CLK_DDR] = "ddr",
 40	[ATH79_CLK_AHB] = "ahb",
 41	[ATH79_CLK_REF] = "ref",
 42	[ATH79_CLK_MDIO] = "mdio",
 43};
 44
 45static const char * __init ath79_clk_name(int type)
 46{
 47	BUG_ON(type >= ARRAY_SIZE(clk_names) || !clk_names[type]);
 48	return clk_names[type];
 49}
 50
 51static void __init __ath79_set_clk(int type, const char *name, struct clk *clk)
 52{
 53	if (IS_ERR(clk))
 54		panic("failed to allocate %s clock structure", clk_names[type]);
 55
 56	clks[type] = clk;
 57	clk_register_clkdev(clk, name, NULL);
 58}
 59
 60static struct clk * __init ath79_set_clk(int type, unsigned long rate)
 61{
 62	const char *name = ath79_clk_name(type);
 63	struct clk *clk;
 64
 65	clk = clk_register_fixed_rate(NULL, name, NULL, 0, rate);
 66	__ath79_set_clk(type, name, clk);
 67	return clk;
 68}
 69
 70static struct clk * __init ath79_set_ff_clk(int type, const char *parent,
 71					    unsigned int mult, unsigned int div)
 72{
 73	const char *name = ath79_clk_name(type);
 74	struct clk *clk;
 75
 76	clk = clk_register_fixed_factor(NULL, name, parent, 0, mult, div);
 77	__ath79_set_clk(type, name, clk);
 78	return clk;
 79}
 80
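/*
 * Returns the effective reference clock rate.  If the device tree code in
 * ath79_clocks_init_dt() below has already stored a "ref" clock in
 * clks[ATH79_CLK_REF], that clock's rate overrides the rate passed in by the
 * SoC-specific init code; otherwise a fixed-rate reference clock is
 * registered at the requested rate.
 */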
 81static unsigned long __init ath79_setup_ref_clk(unsigned long rate)
 82{
 83	struct clk *clk = clks[ATH79_CLK_REF];
 84
 85	if (clk)
 86		rate = clk_get_rate(clk);
 87	else
 88		clk = ath79_set_clk(ATH79_CLK_REF, rate);
 89
 90	return rate;
 91}
 92
 93static void __init ar71xx_clocks_init(void __iomem *pll_base)
 94{
 95	unsigned long ref_rate;
 96	unsigned long cpu_rate;
 97	unsigned long ddr_rate;
 98	unsigned long ahb_rate;
 99	u32 pll;
100	u32 freq;
101	u32 div;
102
103	ref_rate = ath79_setup_ref_clk(AR71XX_BASE_FREQ);
104
105	pll = __raw_readl(pll_base + AR71XX_PLL_REG_CPU_CONFIG);
106
107	div = ((pll >> AR71XX_PLL_FB_SHIFT) & AR71XX_PLL_FB_MASK) + 1;
108	freq = div * ref_rate;
109
110	div = ((pll >> AR71XX_CPU_DIV_SHIFT) & AR71XX_CPU_DIV_MASK) + 1;
111	cpu_rate = freq / div;
112
113	div = ((pll >> AR71XX_DDR_DIV_SHIFT) & AR71XX_DDR_DIV_MASK) + 1;
114	ddr_rate = freq / div;
115
116	div = (((pll >> AR71XX_AHB_DIV_SHIFT) & AR71XX_AHB_DIV_MASK) + 1) * 2;
117	ahb_rate = cpu_rate / div;
118
119	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
120	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
121	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
122}
123
124static void __init ar724x_clocks_init(void __iomem *pll_base)
125{
126	u32 mult, div, ddr_div, ahb_div;
127	u32 pll;
128
129	ath79_setup_ref_clk(AR71XX_BASE_FREQ);
130
131	pll = __raw_readl(pll_base + AR724X_PLL_REG_CPU_CONFIG);
132
133	mult = ((pll >> AR724X_PLL_FB_SHIFT) & AR724X_PLL_FB_MASK);
134	div = ((pll >> AR724X_PLL_REF_DIV_SHIFT) & AR724X_PLL_REF_DIV_MASK) * 2;
135
136	ddr_div = ((pll >> AR724X_DDR_DIV_SHIFT) & AR724X_DDR_DIV_MASK) + 1;
137	ahb_div = (((pll >> AR724X_AHB_DIV_SHIFT) & AR724X_AHB_DIV_MASK) + 1) * 2;
138
139	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", mult, div);
140	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", mult, div * ddr_div);
141	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", mult, div * ahb_div);
142}
143
144static void __init ar933x_clocks_init(void __iomem *pll_base)
145{
146	unsigned long ref_rate;
147	u32 clock_ctrl;
148	u32 ref_div;
149	u32 ninit_mul;
150	u32 out_div;
151
152	u32 cpu_div;
153	u32 ddr_div;
154	u32 ahb_div;
155	u32 t;
156
157	t = ath79_reset_rr(AR933X_RESET_REG_BOOTSTRAP);
158	if (t & AR933X_BOOTSTRAP_REF_CLK_40)
159		ref_rate = (40 * 1000 * 1000);
160	else
161		ref_rate = (25 * 1000 * 1000);
162
163	ath79_setup_ref_clk(ref_rate);
164
165	clock_ctrl = __raw_readl(pll_base + AR933X_PLL_CLOCK_CTRL_REG);
166	if (clock_ctrl & AR933X_PLL_CLOCK_CTRL_BYPASS) {
167		ref_div = 1;
168		ninit_mul = 1;
169		out_div = 1;
170
171		cpu_div = 1;
172		ddr_div = 1;
173		ahb_div = 1;
174	} else {
175		u32 cpu_config;
176		u32 t;
177
178		cpu_config = __raw_readl(pll_base + AR933X_PLL_CPU_CONFIG_REG);
179
180		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
181		    AR933X_PLL_CPU_CONFIG_REFDIV_MASK;
182		ref_div = t;
183
184		ninit_mul = (cpu_config >> AR933X_PLL_CPU_CONFIG_NINT_SHIFT) &
185		    AR933X_PLL_CPU_CONFIG_NINT_MASK;
186
187		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
188		    AR933X_PLL_CPU_CONFIG_OUTDIV_MASK;
189		if (t == 0)
190			t = 1;
191
192		out_div = (1 << t);
193
194		cpu_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_CPU_DIV_SHIFT) &
195		     AR933X_PLL_CLOCK_CTRL_CPU_DIV_MASK) + 1;
196
197		ddr_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_DDR_DIV_SHIFT) &
198		      AR933X_PLL_CLOCK_CTRL_DDR_DIV_MASK) + 1;
199
200		ahb_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_AHB_DIV_SHIFT) &
201		     AR933X_PLL_CLOCK_CTRL_AHB_DIV_MASK) + 1;
202	}
203
204	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", ninit_mul,
205			 ref_div * out_div * cpu_div);
206	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", ninit_mul,
207			 ref_div * out_div * ddr_div);
208	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", ninit_mul,
209			 ref_div * out_div * ahb_div);
210}
211
212static u32 __init ar934x_get_pll_freq(u32 ref, u32 ref_div, u32 nint, u32 nfrac,
213				      u32 frac, u32 out_div)
214{
215	u64 t;
216	u32 ret;
217
218	t = ref;
219	t *= nint;
220	do_div(t, ref_div);
221	ret = t;
222
223	t = ref;
224	t *= nfrac;
225	do_div(t, ref_div * frac);
226	ret += t;
227
228	ret /= (1 << out_div);
229	return ret;
230}
231
232static void __init ar934x_clocks_init(void __iomem *pll_base)
233{
234	unsigned long ref_rate;
235	unsigned long cpu_rate;
236	unsigned long ddr_rate;
237	unsigned long ahb_rate;
238	u32 pll, out_div, ref_div, nint, nfrac, frac, clk_ctrl, postdiv;
239	u32 cpu_pll, ddr_pll;
240	u32 bootstrap;
241	void __iomem *dpll_base;
242
243	dpll_base = ioremap(AR934X_SRIF_BASE, AR934X_SRIF_SIZE);
244
245	bootstrap = ath79_reset_rr(AR934X_RESET_REG_BOOTSTRAP);
246	if (bootstrap & AR934X_BOOTSTRAP_REF_CLK_40)
247		ref_rate = 40 * 1000 * 1000;
248	else
249		ref_rate = 25 * 1000 * 1000;
250
251	ref_rate = ath79_setup_ref_clk(ref_rate);
252
253	pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL2_REG);
254	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
255		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
256			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
257		pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL1_REG);
258		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
259		       AR934X_SRIF_DPLL1_NINT_MASK;
260		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
261		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
262			  AR934X_SRIF_DPLL1_REFDIV_MASK;
263		frac = 1 << 18;
264	} else {
265		pll = __raw_readl(pll_base + AR934X_PLL_CPU_CONFIG_REG);
266		out_div = (pll >> AR934X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
267			AR934X_PLL_CPU_CONFIG_OUTDIV_MASK;
268		ref_div = (pll >> AR934X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
269			  AR934X_PLL_CPU_CONFIG_REFDIV_MASK;
270		nint = (pll >> AR934X_PLL_CPU_CONFIG_NINT_SHIFT) &
271		       AR934X_PLL_CPU_CONFIG_NINT_MASK;
272		nfrac = (pll >> AR934X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
273			AR934X_PLL_CPU_CONFIG_NFRAC_MASK;
274		frac = 1 << 6;
275	}
276
277	cpu_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
278				      nfrac, frac, out_div);
279
280	pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL2_REG);
281	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
282		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
283			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
284		pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL1_REG);
285		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
286		       AR934X_SRIF_DPLL1_NINT_MASK;
287		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
288		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
289			  AR934X_SRIF_DPLL1_REFDIV_MASK;
290		frac = 1 << 18;
291	} else {
292		pll = __raw_readl(pll_base + AR934X_PLL_DDR_CONFIG_REG);
293		out_div = (pll >> AR934X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
294			  AR934X_PLL_DDR_CONFIG_OUTDIV_MASK;
295		ref_div = (pll >> AR934X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
296			   AR934X_PLL_DDR_CONFIG_REFDIV_MASK;
297		nint = (pll >> AR934X_PLL_DDR_CONFIG_NINT_SHIFT) &
298		       AR934X_PLL_DDR_CONFIG_NINT_MASK;
299		nfrac = (pll >> AR934X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
300			AR934X_PLL_DDR_CONFIG_NFRAC_MASK;
301		frac = 1 << 10;
302	}
303
304	ddr_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
305				      nfrac, frac, out_div);
306
307	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_CPU_DDR_CLK_CTRL_REG);
308
309	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_SHIFT) &
310		  AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_MASK;
311
312	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_PLL_BYPASS)
313		cpu_rate = ref_rate;
314	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPUCLK_FROM_CPUPLL)
315		cpu_rate = cpu_pll / (postdiv + 1);
316	else
317		cpu_rate = ddr_pll / (postdiv + 1);
318
319	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_SHIFT) &
320		  AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_MASK;
321
322	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_PLL_BYPASS)
323		ddr_rate = ref_rate;
324	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDRCLK_FROM_DDRPLL)
325		ddr_rate = ddr_pll / (postdiv + 1);
326	else
327		ddr_rate = cpu_pll / (postdiv + 1);
328
329	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_SHIFT) &
330		  AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_MASK;
331
332	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_PLL_BYPASS)
333		ahb_rate = ref_rate;
334	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHBCLK_FROM_DDRPLL)
335		ahb_rate = ddr_pll / (postdiv + 1);
336	else
337		ahb_rate = cpu_pll / (postdiv + 1);
338
339	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
340	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
341	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
342
343	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_SWITCH_CLOCK_CONTROL_REG);
344	if (clk_ctrl & AR934X_PLL_SWITCH_CLOCK_CONTROL_MDIO_CLK_SEL)
345		ath79_set_clk(ATH79_CLK_MDIO, 100 * 1000 * 1000);
346
347	iounmap(dpll_base);
348}
349
350static void __init qca953x_clocks_init(void __iomem *pll_base)
351{
352	unsigned long ref_rate;
353	unsigned long cpu_rate;
354	unsigned long ddr_rate;
355	unsigned long ahb_rate;
356	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
357	u32 cpu_pll, ddr_pll;
358	u32 bootstrap;
359
360	bootstrap = ath79_reset_rr(QCA953X_RESET_REG_BOOTSTRAP);
361	if (bootstrap &	QCA953X_BOOTSTRAP_REF_CLK_40)
362		ref_rate = 40 * 1000 * 1000;
363	else
364		ref_rate = 25 * 1000 * 1000;
365
366	ref_rate = ath79_setup_ref_clk(ref_rate);
367
368	pll = __raw_readl(pll_base + QCA953X_PLL_CPU_CONFIG_REG);
369	out_div = (pll >> QCA953X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
370		  QCA953X_PLL_CPU_CONFIG_OUTDIV_MASK;
371	ref_div = (pll >> QCA953X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
372		  QCA953X_PLL_CPU_CONFIG_REFDIV_MASK;
373	nint = (pll >> QCA953X_PLL_CPU_CONFIG_NINT_SHIFT) &
374	       QCA953X_PLL_CPU_CONFIG_NINT_MASK;
375	frac = (pll >> QCA953X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
376	       QCA953X_PLL_CPU_CONFIG_NFRAC_MASK;
377
378	cpu_pll = nint * ref_rate / ref_div;
379	cpu_pll += frac * (ref_rate >> 6) / ref_div;
380	cpu_pll /= (1 << out_div);
381
382	pll = __raw_readl(pll_base + QCA953X_PLL_DDR_CONFIG_REG);
383	out_div = (pll >> QCA953X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
384		  QCA953X_PLL_DDR_CONFIG_OUTDIV_MASK;
385	ref_div = (pll >> QCA953X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
386		  QCA953X_PLL_DDR_CONFIG_REFDIV_MASK;
387	nint = (pll >> QCA953X_PLL_DDR_CONFIG_NINT_SHIFT) &
388	       QCA953X_PLL_DDR_CONFIG_NINT_MASK;
389	frac = (pll >> QCA953X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
390	       QCA953X_PLL_DDR_CONFIG_NFRAC_MASK;
391
392	ddr_pll = nint * ref_rate / ref_div;
393	ddr_pll += frac * (ref_rate >> 6) / (ref_div << 4);
394	ddr_pll /= (1 << out_div);
395
396	clk_ctrl = __raw_readl(pll_base + QCA953X_PLL_CLK_CTRL_REG);
397
398	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
399		  QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;
400
401	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
402		cpu_rate = ref_rate;
403	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
404		cpu_rate = cpu_pll / (postdiv + 1);
405	else
406		cpu_rate = ddr_pll / (postdiv + 1);
407
408	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
409		  QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;
410
411	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
412		ddr_rate = ref_rate;
413	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
414		ddr_rate = ddr_pll / (postdiv + 1);
415	else
416		ddr_rate = cpu_pll / (postdiv + 1);
417
418	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
419		  QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;
420
421	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
422		ahb_rate = ref_rate;
423	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
424		ahb_rate = ddr_pll / (postdiv + 1);
425	else
426		ahb_rate = cpu_pll / (postdiv + 1);
427
428	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
429	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
430	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
431}
432
433static void __init qca955x_clocks_init(void __iomem *pll_base)
434{
435	unsigned long ref_rate;
436	unsigned long cpu_rate;
437	unsigned long ddr_rate;
438	unsigned long ahb_rate;
439	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
440	u32 cpu_pll, ddr_pll;
441	u32 bootstrap;
442
443	bootstrap = ath79_reset_rr(QCA955X_RESET_REG_BOOTSTRAP);
444	if (bootstrap &	QCA955X_BOOTSTRAP_REF_CLK_40)
445		ref_rate = 40 * 1000 * 1000;
446	else
447		ref_rate = 25 * 1000 * 1000;
448
449	ref_rate = ath79_setup_ref_clk(ref_rate);
450
451	pll = __raw_readl(pll_base + QCA955X_PLL_CPU_CONFIG_REG);
452	out_div = (pll >> QCA955X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
453		  QCA955X_PLL_CPU_CONFIG_OUTDIV_MASK;
454	ref_div = (pll >> QCA955X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
455		  QCA955X_PLL_CPU_CONFIG_REFDIV_MASK;
456	nint = (pll >> QCA955X_PLL_CPU_CONFIG_NINT_SHIFT) &
457	       QCA955X_PLL_CPU_CONFIG_NINT_MASK;
458	frac = (pll >> QCA955X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
459	       QCA955X_PLL_CPU_CONFIG_NFRAC_MASK;
460
461	cpu_pll = nint * ref_rate / ref_div;
462	cpu_pll += frac * ref_rate / (ref_div * (1 << 6));
463	cpu_pll /= (1 << out_div);
464
465	pll = __raw_readl(pll_base + QCA955X_PLL_DDR_CONFIG_REG);
466	out_div = (pll >> QCA955X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
467		  QCA955X_PLL_DDR_CONFIG_OUTDIV_MASK;
468	ref_div = (pll >> QCA955X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
469		  QCA955X_PLL_DDR_CONFIG_REFDIV_MASK;
470	nint = (pll >> QCA955X_PLL_DDR_CONFIG_NINT_SHIFT) &
471	       QCA955X_PLL_DDR_CONFIG_NINT_MASK;
472	frac = (pll >> QCA955X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
473	       QCA955X_PLL_DDR_CONFIG_NFRAC_MASK;
474
475	ddr_pll = nint * ref_rate / ref_div;
476	ddr_pll += frac * ref_rate / (ref_div * (1 << 10));
477	ddr_pll /= (1 << out_div);
478
479	clk_ctrl = __raw_readl(pll_base + QCA955X_PLL_CLK_CTRL_REG);
480
481	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
482		  QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;
483
484	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
485		cpu_rate = ref_rate;
486	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
487		cpu_rate = ddr_pll / (postdiv + 1);
488	else
489		cpu_rate = cpu_pll / (postdiv + 1);
490
491	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
492		  QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;
493
494	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
495		ddr_rate = ref_rate;
496	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
497		ddr_rate = cpu_pll / (postdiv + 1);
498	else
499		ddr_rate = ddr_pll / (postdiv + 1);
500
501	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
502		  QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;
503
504	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
505		ahb_rate = ref_rate;
506	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
507		ahb_rate = ddr_pll / (postdiv + 1);
508	else
509		ahb_rate = cpu_pll / (postdiv + 1);
510
511	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
512	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
513	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
514}
515
516static void __init qca956x_clocks_init(void __iomem *pll_base)
517{
518	unsigned long ref_rate;
519	unsigned long cpu_rate;
520	unsigned long ddr_rate;
521	unsigned long ahb_rate;
522	u32 pll, out_div, ref_div, nint, hfrac, lfrac, clk_ctrl, postdiv;
523	u32 cpu_pll, ddr_pll;
524	u32 bootstrap;
525
526	/*
527	 * QCA956x timer init workaround has to be applied right before setting
528	 * up the clock. Else, there will be no jiffies
529	 */
530	u32 misc;
531
532	misc = ath79_reset_rr(AR71XX_RESET_REG_MISC_INT_ENABLE);
533	misc |= MISC_INT_MIPS_SI_TIMERINT_MASK;
534	ath79_reset_wr(AR71XX_RESET_REG_MISC_INT_ENABLE, misc);
535
536	bootstrap = ath79_reset_rr(QCA956X_RESET_REG_BOOTSTRAP);
537	if (bootstrap &	QCA956X_BOOTSTRAP_REF_CLK_40)
538		ref_rate = 40 * 1000 * 1000;
539	else
540		ref_rate = 25 * 1000 * 1000;
541
542	ref_rate = ath79_setup_ref_clk(ref_rate);
543
544	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG_REG);
545	out_div = (pll >> QCA956X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
546		  QCA956X_PLL_CPU_CONFIG_OUTDIV_MASK;
547	ref_div = (pll >> QCA956X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
548		  QCA956X_PLL_CPU_CONFIG_REFDIV_MASK;
549
550	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG1_REG);
551	nint = (pll >> QCA956X_PLL_CPU_CONFIG1_NINT_SHIFT) &
552	       QCA956X_PLL_CPU_CONFIG1_NINT_MASK;
553	hfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_H_SHIFT) &
554	       QCA956X_PLL_CPU_CONFIG1_NFRAC_H_MASK;
555	lfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_L_SHIFT) &
556	       QCA956X_PLL_CPU_CONFIG1_NFRAC_L_MASK;
557
558	cpu_pll = nint * ref_rate / ref_div;
559	cpu_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
560	cpu_pll += (hfrac >> 13) * ref_rate / ref_div;
561	cpu_pll /= (1 << out_div);
562
563	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG_REG);
564	out_div = (pll >> QCA956X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
565		  QCA956X_PLL_DDR_CONFIG_OUTDIV_MASK;
566	ref_div = (pll >> QCA956X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
567		  QCA956X_PLL_DDR_CONFIG_REFDIV_MASK;
568	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG1_REG);
569	nint = (pll >> QCA956X_PLL_DDR_CONFIG1_NINT_SHIFT) &
570	       QCA956X_PLL_DDR_CONFIG1_NINT_MASK;
571	hfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_H_SHIFT) &
572	       QCA956X_PLL_DDR_CONFIG1_NFRAC_H_MASK;
573	lfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_L_SHIFT) &
574	       QCA956X_PLL_DDR_CONFIG1_NFRAC_L_MASK;
575
576	ddr_pll = nint * ref_rate / ref_div;
577	ddr_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
578	ddr_pll += (hfrac >> 13) * ref_rate / ref_div;
579	ddr_pll /= (1 << out_div);
580
581	clk_ctrl = __raw_readl(pll_base + QCA956X_PLL_CLK_CTRL_REG);
582
583	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
584		  QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;
585
586	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
587		cpu_rate = ref_rate;
588	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_CPUPLL)
589		cpu_rate = ddr_pll / (postdiv + 1);
590	else
591		cpu_rate = cpu_pll / (postdiv + 1);
592
593	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
594		  QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;
595
596	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
597		ddr_rate = ref_rate;
598	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_DDRPLL)
599		ddr_rate = cpu_pll / (postdiv + 1);
600	else
601		ddr_rate = ddr_pll / (postdiv + 1);
602
603	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
604		  QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;
605
606	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
607		ahb_rate = ref_rate;
608	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
609		ahb_rate = ddr_pll / (postdiv + 1);
610	else
611		ahb_rate = cpu_pll / (postdiv + 1);
612
613	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
614	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
615	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
616}
617
618static void __init ath79_clocks_init_dt(struct device_node *np)
619{
620	struct clk *ref_clk;
621	void __iomem *pll_base;
622
623	ref_clk = of_clk_get(np, 0);
624	if (!IS_ERR(ref_clk))
625		clks[ATH79_CLK_REF] = ref_clk;
626
627	pll_base = of_iomap(np, 0);
628	if (!pll_base) {
629		pr_err("%pOF: can't map pll registers\n", np);
630		goto err_clk;
631	}
632
633	if (of_device_is_compatible(np, "qca,ar7100-pll"))
634		ar71xx_clocks_init(pll_base);
635	else if (of_device_is_compatible(np, "qca,ar7240-pll") ||
636		 of_device_is_compatible(np, "qca,ar9130-pll"))
637		ar724x_clocks_init(pll_base);
638	else if (of_device_is_compatible(np, "qca,ar9330-pll"))
639		ar933x_clocks_init(pll_base);
640	else if (of_device_is_compatible(np, "qca,ar9340-pll"))
641		ar934x_clocks_init(pll_base);
642	else if (of_device_is_compatible(np, "qca,qca9530-pll"))
643		qca953x_clocks_init(pll_base);
644	else if (of_device_is_compatible(np, "qca,qca9550-pll"))
645		qca955x_clocks_init(pll_base);
646	else if (of_device_is_compatible(np, "qca,qca9560-pll"))
647		qca956x_clocks_init(pll_base);
648
649	if (!clks[ATH79_CLK_MDIO])
650		clks[ATH79_CLK_MDIO] = clks[ATH79_CLK_REF];
651
652	if (of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data)) {
653		pr_err("%pOF: could not register clk provider\n", np);
654		goto err_iounmap;
655	}
656
657	return;
658
659err_iounmap:
660	iounmap(pll_base);
661
662err_clk:
663	clk_put(ref_clk);
664}
665
666CLK_OF_DECLARE(ar7100_clk, "qca,ar7100-pll", ath79_clocks_init_dt);
667CLK_OF_DECLARE(ar7240_clk, "qca,ar7240-pll", ath79_clocks_init_dt);
668CLK_OF_DECLARE(ar9130_clk, "qca,ar9130-pll", ath79_clocks_init_dt);
669CLK_OF_DECLARE(ar9330_clk, "qca,ar9330-pll", ath79_clocks_init_dt);
670CLK_OF_DECLARE(ar9340_clk, "qca,ar9340-pll", ath79_clocks_init_dt);
671CLK_OF_DECLARE(ar9530_clk, "qca,qca9530-pll", ath79_clocks_init_dt);
672CLK_OF_DECLARE(ar9550_clk, "qca,qca9550-pll", ath79_clocks_init_dt);
673CLK_OF_DECLARE(ar9560_clk, "qca,qca9560-pll", ath79_clocks_init_dt);
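/*
 * Consumer-side sketch (illustrative; the function name and the assumption
 * that the peripheral's first "clocks" entry points at <&pll ATH79_CLK_AHB>
 * are not part of this file): with the one-cell provider registered above,
 * a driver obtains and queries a clock through the common clock API.
 */
#if 0	/* example only */
static int example_get_bus_clock(struct device *dev)
{
	struct clk *ahb_clk;

	/* first entry of the consumer node's "clocks" property */
	ahb_clk = devm_clk_get(dev, NULL);
	if (IS_ERR(ahb_clk))
		return PTR_ERR(ahb_clk);

	pr_info("AHB clock: %lu Hz\n", clk_get_rate(ahb_clk));
	return 0;
}
#endif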