arch/arm/mach-omap2/sleep34xx.S (Linux v3.5.6)
  1/*
  2 * (C) Copyright 2007
  3 * Texas Instruments
  4 * Karthik Dasu <karthik-dp@ti.com>
  5 *
  6 * (C) Copyright 2004
  7 * Texas Instruments, <www.ti.com>
  8 * Richard Woodruff <r-woodruff2@ti.com>
  9 *
 10 * This program is free software; you can redistribute it and/or
 11 * modify it under the terms of the GNU General Public License as
 12 * published by the Free Software Foundation; either version 2 of
 13 * the License, or (at your option) any later version.
 14 *
 15 * This program is distributed in the hope that it will be useful,
 16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 18 * GNU General Public License for more details.
 19 *
 20 * You should have received a copy of the GNU General Public License
 21 * along with this program; if not, write to the Free Software
 22 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 23 * MA 02111-1307 USA
 24 */
 25#include <linux/linkage.h>
 26
 27#include <asm/assembler.h>
 28
 29#include <plat/hardware.h>
 30#include <plat/sram.h>
 31
 32#include "iomap.h"
 33#include "cm2xxx_3xxx.h"
 34#include "prm2xxx_3xxx.h"
 35#include "sdrc.h"
 36#include "control.h"
 37
 38/*
 39 * Register access definitions
 40 */
 41#define SDRC_SCRATCHPAD_SEM_OFFS	0xc
 42#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
 43					(SDRC_SCRATCHPAD_SEM_OFFS)
 44#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
 45					OMAP3430_PM_PREPWSTST
 46#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
 47#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
 48#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
 49#define SRAM_BASE_P		OMAP3_SRAM_PA
 50#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
 51#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
 52					OMAP36XX_CONTROL_MEM_RTA_CTRL)
 53
 54/* Move this once a correct place is available */
 55#define SCRATCHPAD_MEM_OFFS	0x310
 56#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
 57					OMAP343X_CONTROL_MEM_WKUP +\
 58					SCRATCHPAD_MEM_OFFS)
 59#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
 60#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
 61#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
 62#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
 63#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
 64#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
 65#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
 66#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
 67#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
 68#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
 69
 70/*
 71 * This file needs to be built unconditionally as ARM to interoperate correctly
 72 * with non-Thumb-2-capable firmware.
 73 */
 74	.arm
 75
 76/*
 77 * API functions
 78 */
 79
 80	.text
 81/*
 82 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 83 * This function sets up a flag that will allow for this toggling to take
 84 * place on 3630. Hopefully some version in the future may not need this.
 85 */
 86ENTRY(enable_omap3630_toggle_l2_on_restore)
 87	stmfd	sp!, {lr}	@ save registers on stack
 88	/* Setup so that we will disable and enable l2 */
 89	mov	r1, #0x1
 90	adrl	r2, l2dis_3630	@ may be too distant for plain adr
 91	str	r1, [r2]
 92	ldmfd	sp!, {pc}	@ restore regs and return
 93ENDPROC(enable_omap3630_toggle_l2_on_restore)
 94
 95	.text
 96/* Function to call rom code to save secure ram context */
 97	.align	3
 98ENTRY(save_secure_ram_context)
 99	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
100	adr	r3, api_params		@ r3 points to parameters
101	str	r0, [r3,#0x4]		@ r0 has sdram address
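	/*
	 * The next four instructions turn the run-time address of api_params
	 * (this routine is copied to and run from SRAM) into the physical
	 * address the ROM code expects: keep only the low 16 bits, i.e. the
	 * offset inside the 64 KiB on-chip SRAM, and OR in the SRAM physical
	 * base (sram_phy_addr_mask, i.e. SRAM_BASE_P).
	 */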
102	ldr	r12, high_mask
103	and	r3, r3, r12
104	ldr	r12, sram_phy_addr_mask
105	orr	r3, r3, r12
106	mov	r0, #25			@ set service ID for PPA
107	mov	r12, r0			@ copy secure service ID in r12
108	mov	r1, #0			@ set task id for ROM code in r1
109	mov	r2, #4			@ set some flags in r2, r6
110	mov	r6, #0xff
111	dsb				@ data write barrier
112	dmb				@ data memory barrier
113	smc	#1			@ call SMI monitor (smi #1)
114	nop
115	nop
116	nop
117	nop
118	ldmfd	sp!, {r4 - r11, pc}
119	.align
120sram_phy_addr_mask:
121	.word	SRAM_BASE_P
122high_mask:
123	.word	0xffff
124api_params:
125	.word	0x4, 0x0, 0x0, 0x1, 0x1
126ENDPROC(save_secure_ram_context)
127ENTRY(save_secure_ram_context_sz)
128	.word	. - save_secure_ram_context
129
130/*
131 * ======================
132 * == Idle entry point ==
133 * ======================
134 */
135
136/*
137 * Forces OMAP into idle state
138 *
139 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
140 * and executes the WFI instruction. Calling WFI effectively changes the
141 * power domain states to the desired target power states.
142 *
143 *
144 * Notes:
145 * - only the minimum set of functions gets copied to internal SRAM at boot
146 *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
147 *   pointers in SDRAM or SRAM are called depending on the desired low power
148 *   target state.
149 * - when the OMAP wakes up it continues at different execution points
150 *   depending on the low power mode (non-OFF vs OFF modes),
151 *   cf. 'Resume path for xxx mode' comments.
152 */
153	.align	3
154ENTRY(omap34xx_cpu_suspend)
155	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
156
157	/*
158	 * r0 contains information about saving context:
159	 *   0 - No context lost
160	 *   1 - Only L1 and logic lost
161	 *   2 - Only L2 lost (Even if L1 is retained, we clean it along with L2)
162	 *   3 - Both L1 and L2 lost and logic lost
163	 */
164
165	/*
166	 * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
167	 * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
168	 */
169	ldr	r4, omap3_do_wfi_sram_addr
170	ldr	r5, [r4]
171	cmp	r0, #0x0		@ If no context save required,
172	bxeq	r5			@  jump to the WFI code in SRAM
173
174
175	/* Otherwise fall through to the save context code */
176save_context_wfi:
177	/*
178	 * jump out to kernel flush routine
179	 *  - reusing that code is better
180	 *  - it executes in a cached space so is faster than refetch per-block
181	 *  - should be faster and will change with the kernel
182	 *  - 'might' have to copy address, load and jump to it
183	 * Flush all data from the L1 data cache before disabling
184	 * SCTLR.C bit.
185	 */
186	ldr	r1, kernel_flush
187	mov	lr, pc
188	bx	r1
189
190	/*
191	 * Clear the SCTLR.C bit to prevent further data cache
192	 * allocation. Clearing SCTLR.C makes all data accesses
193	 * strongly ordered, so they no longer hit the cache.
194	 */
195	mrc	p15, 0, r0, c1, c0, 0
196	bic	r0, r0, #(1 << 2)	@ Disable the C bit
197	mcr	p15, 0, r0, c1, c0, 0
198	isb
199
200	/*
201	 * Invalidate the L1 data cache. Even though only invalidation is
202	 * necessary, the exported flush API is used here. Doing a clean
203	 * on an already clean cache is almost a NOP.
204	 */
205	ldr	r1, kernel_flush
206	blx	r1
207	/*
208	 * The kernel doesn't interwork: v7_flush_dcache_all in particular will
209	 * always return in Thumb state when CONFIG_THUMB2_KERNEL is enabled.
210	 * This sequence switches back to ARM.  Note that .align may insert a
211	 * nop: bx pc needs to be word-aligned in order to work.
212	 */
213 THUMB(	.thumb		)
214 THUMB(	.align		)
215 THUMB(	bx	pc	)
216 THUMB(	nop		)
217	.arm
218
219	b	omap3_do_wfi
220
221/*
222 * Local variables
223 */
224omap3_do_wfi_sram_addr:
225	.word omap3_do_wfi_sram
226kernel_flush:
227	.word v7_flush_dcache_all
228
229/* ===================================
230 * == WFI instruction => Enter idle ==
231 * ===================================
232 */
233
234/*
235 * Do WFI instruction
236 * Includes the resume path for non-OFF modes
237 *
238 * This code gets copied to internal SRAM and is accessible
239 * from both SDRAM and SRAM:
240 * - executed from SRAM for non-off modes (omap3_do_wfi_sram),
241 * - executed from SDRAM for OFF mode (omap3_do_wfi).
242 */
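/*
 * Note: every register address used below is fetched from a nearby .word
 * literal with a PC-relative load, so the routine itself is position
 * independent and behaves the same whether it runs from its SDRAM link
 * address or from the copy in SRAM.
 */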
243	.align	3
244ENTRY(omap3_do_wfi)
245	ldr	r4, sdrc_power		@ read the SDRC_POWER register
246	ldr	r5, [r4]		@ read the contents of SDRC_POWER
247	orr	r5, r5, #0x40		@ enable self refresh on idle req
248	str	r5, [r4]		@ write back to SDRC_POWER register
249
250	/* Data memory barrier and Data sync barrier */
251	dsb
252	dmb
253
254/*
255 * ===================================
256 * == WFI instruction => Enter idle ==
257 * ===================================
258 */
259	wfi				@ wait for interrupt
260
261/*
262 * ===================================
263 * == Resume path for non-OFF modes ==
264 * ===================================
265 */
266	nop
267	nop
268	nop
269	nop
270	nop
271	nop
272	nop
273	nop
274	nop
275	nop
276
277/*
278 * This function implements the erratum ID i581 WA:
279 *  SDRC state restore before accessing the SDRAM
280 *
281 * Only used at return from non-OFF mode. For OFF
282 * mode the ROM code configures the SDRC and
283 * the DPLL before calling the restore code directly
284 * from DDR.
285 */
286
287/* Make sure SDRC accesses are ok */
288wait_sdrc_ok:
289
290/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
291	ldr	r4, cm_idlest_ckgen
292wait_dpll3_lock:
293	ldr	r5, [r4]
294	tst	r5, #1
295	beq	wait_dpll3_lock
296
297	ldr	r4, cm_idlest1_core
298wait_sdrc_ready:
299	ldr	r5, [r4]
300	tst	r5, #0x2
301	bne	wait_sdrc_ready
302	/* allow DLL powerdown upon hw idle req */
303	ldr	r4, sdrc_power
304	ldr	r5, [r4]
305	bic	r5, r5, #0x40
306	str	r5, [r4]
307
308/*
309 * PC-relative stores lead to undefined behaviour in Thumb-2: use r7 as a
310 * base instead.
311 * Be careful not to clobber r7 when maintaining this code.
312 */
313
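/*
 * A minimal sketch of the pattern described above, using hypothetical labels
 * and keeping the counters next to the code (the code below keeps the base
 * in r7):
 *
 *	bump_second:
 *		adr	r1, counters			@ base of the data block
 *		ldr	r0, [r1, #second - counters]	@ load via base + offset
 *		add	r0, r0, #1
 *		str	r0, [r1, #second - counters]	@ store the same way;
 *							@ no PC-relative str needed
 *		bx	lr
 *	counters:
 *		.word	0
 *	second:
 *		.word	0
 */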
314is_dll_in_lock_mode:
315	/* Is dll in lock mode? */
316	ldr	r4, sdrc_dlla_ctrl
317	ldr	r5, [r4]
318	tst	r5, #0x4
319	bne	exit_nonoff_modes	@ Return if locked
320	/* wait till dll locks */
321	adr	r7, kick_counter
322wait_dll_lock_timed:
323	ldr	r4, wait_dll_lock_counter
324	add	r4, r4, #1
325	str	r4, [r7, #wait_dll_lock_counter - kick_counter]
326	ldr	r4, sdrc_dlla_status
327	/* Wait 20 us for lock */
328	mov	r6, #8
329wait_dll_lock:
330	subs	r6, r6, #0x1
331	beq	kick_dll
332	ldr	r5, [r4]
333	and	r5, r5, #0x4
334	cmp	r5, #0x4
335	bne	wait_dll_lock
336	b	exit_nonoff_modes	@ Return when locked
337
338	/* disable/reenable DLL if not locked */
339kick_dll:
340	ldr	r4, sdrc_dlla_ctrl
341	ldr	r5, [r4]
342	mov	r6, r5
343	bic	r6, #(1<<3)		@ disable dll
344	str	r6, [r4]
345	dsb
346	orr	r6, r6, #(1<<3)		@ enable dll
347	str	r6, [r4]
348	dsb
349	ldr	r4, kick_counter
350	add	r4, r4, #1
351	str	r4, [r7]		@ kick_counter
352	b	wait_dll_lock_timed
353
354exit_nonoff_modes:
355	/* Re-enable C-bit if needed */
356	mrc	p15, 0, r0, c1, c0, 0
357	tst	r0, #(1 << 2)		@ Check C bit enabled?
358	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
359	mcreq	p15, 0, r0, c1, c0, 0
360	isb
361
362/*
363 * ===================================
364 * == Exit point from non-OFF modes ==
365 * ===================================
366 */
367	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
368
369/*
370 * Local variables
371 */
372sdrc_power:
373	.word	SDRC_POWER_V
374cm_idlest1_core:
375	.word	CM_IDLEST1_CORE_V
376cm_idlest_ckgen:
377	.word	CM_IDLEST_CKGEN_V
378sdrc_dlla_status:
379	.word	SDRC_DLLA_STATUS_V
380sdrc_dlla_ctrl:
381	.word	SDRC_DLLA_CTRL_V
382	/*
383	 * When exporting to userspace while the counters are in SRAM,
384	 * these 2 words need to be at the end to facilitate retrieval!
385	 */
386kick_counter:
387	.word	0
388wait_dll_lock_counter:
389	.word	0
390
391ENTRY(omap3_do_wfi_sz)
392	.word	. - omap3_do_wfi
393
394
395/*
396 * ==============================
397 * == Resume path for OFF mode ==
398 * ==============================
399 */
400
401/*
402 * The restore_* functions are called by the ROM code
403 *  when back from WFI in OFF mode.
404 * Cf. the get_*restore_pointer functions.
405 *
406 *  restore_es3: applies to 34xx >= ES3.0
407 *  restore_3630: applies to 36xx
408 *  restore: common code for 3xxx
409 *
410 * Note: when back from CORE and MPU OFF mode we are running
411 *  from SDRAM, with the MMU, caches and branch prediction disabled.
412 *  The SRAM content has also been cleared.
413 */
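/*
 * Because the MMU is off at this point, the register accesses below go
 * through the *_P (physical address) literals such as pm_prepwstst_core_p,
 * pm_pwstctrl_mpu and scratchpad_base, rather than the *_V virtual ones used
 * in the idle path above.
 */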
414ENTRY(omap3_restore_es3)
415	ldr	r5, pm_prepwstst_core_p
416	ldr	r4, [r5]
417	and	r4, r4, #0x3
418	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
419	bne	omap3_restore	@ Fall through to OMAP3 common code
420	adr	r0, es3_sdrc_fix
421	ldr	r1, sram_base
422	ldr	r2, es3_sdrc_fix_sz
423	mov	r2, r2, ror #2
424copy_to_sram:
425	ldmia	r0!, {r3}	@ val = *src
426	stmia	r1!, {r3}	@ *dst = val
427	subs	r2, r2, #0x1	@ num_words--
428	bne	copy_to_sram
429	ldr	r1, sram_base
430	blx	r1
431	b	omap3_restore	@ Fall through to OMAP3 common code
432ENDPROC(omap3_restore_es3)
433
434ENTRY(omap3_restore_3630)
435	ldr	r1, pm_prepwstst_core_p
436	ldr	r2, [r1]
437	and	r2, r2, #0x3
438	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
439	bne	omap3_restore	@ Fall through to OMAP3 common code
440	/* Disable RTA before giving control */
441	ldr	r1, control_mem_rta
442	mov	r2, #OMAP36XX_RTA_DISABLE
443	str	r2, [r1]
444ENDPROC(omap3_restore_3630)
445
446	/* Fall through to common code for the remaining logic */
447
448ENTRY(omap3_restore)
449	/*
450	 * Read the pwstctrl register to check the reason for mpu reset.
451	 * This tells us what was lost.
452	 */
453	ldr	r1, pm_pwstctrl_mpu
454	ldr	r2, [r1]
455	and	r2, r2, #0x3
456	cmp	r2, #0x0	@ Check if target power state was OFF or RET
457	bne	logic_l1_restore
458
459	ldr	r0, l2dis_3630
460	cmp	r0, #0x1	@ should we disable L2 on 3630?
461	bne	skipl2dis
462	mrc	p15, 0, r0, c1, c0, 1
463	bic	r0, r0, #2	@ disable L2 cache
464	mcr	p15, 0, r0, c1, c0, 1
465skipl2dis:
466	ldr	r0, control_stat
467	ldr	r1, [r0]
468	and	r1, #0x700
469	cmp	r1, #0x300
470	beq	l2_inv_gp
471	mov	r0, #40			@ set service ID for PPA
472	mov	r12, r0			@ copy secure Service ID in r12
473	mov	r1, #0			@ set task id for ROM code in r1
474	mov	r2, #4			@ set some flags in r2, r6
475	mov	r6, #0xff
476	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
477	dsb				@ data write barrier
478	dmb				@ data memory barrier
479	smc	#1			@ call SMI monitor (smi #1)
480	/* Write to Aux control register to set some bits */
481	mov	r0, #42			@ set service ID for PPA
482	mov	r12, r0			@ copy secure Service ID in r12
483	mov	r1, #0			@ set task id for ROM code in r1
484	mov	r2, #4			@ set some flags in r2, r6
485	mov	r6, #0xff
486	ldr	r4, scratchpad_base
487	ldr	r3, [r4, #0xBC]		@ r3 points to parameters
488	dsb				@ data write barrier
489	dmb				@ data memory barrier
490	smc	#1			@ call SMI monitor (smi #1)
491
492#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
493	/* Restore L2 aux control register */
494					@ set service ID for PPA
495	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
496	mov	r12, r0			@ copy service ID in r12
497	mov	r1, #0			@ set task ID for ROM code in r1
498	mov	r2, #4			@ set some flags in r2, r6
499	mov	r6, #0xff
500	ldr	r4, scratchpad_base
501	ldr	r3, [r4, #0xBC]
502	adds	r3, r3, #8		@ r3 points to parameters
503	dsb				@ data write barrier
504	dmb				@ data memory barrier
505	smc	#1			@ call SMI monitor (smi #1)
506#endif
507	b	logic_l1_restore
508
509	.align
510l2_inv_api_params:
511	.word	0x1, 0x00
512l2_inv_gp:
513	/* Execute smi to invalidate L2 cache */
514	mov r12, #0x1			@ set up to invalidate L2
515	smc	#0			@ Call SMI monitor (smieq)
516	/* Write to Aux control register to set some bits */
517	ldr	r4, scratchpad_base
518	ldr	r3, [r4,#0xBC]
519	ldr	r0, [r3,#4]
520	mov	r12, #0x3
521	smc	#0			@ Call SMI monitor (smieq)
522	ldr	r4, scratchpad_base
523	ldr	r3, [r4,#0xBC]
524	ldr	r0, [r3,#12]
525	mov	r12, #0x2
526	smc	#0			@ Call SMI monitor (smieq)
527logic_l1_restore:
528	ldr	r1, l2dis_3630
529	cmp	r1, #0x1		@ Test if L2 re-enable needed on 3630
530	bne	skipl2reen
531	mrc	p15, 0, r1, c1, c0, 1
532	orr	r1, r1, #2		@ re-enable L2 cache
533	mcr	p15, 0, r1, c1, c0, 1
534skipl2reen:
535
536	/* Now branch to the common CPU resume function */
537	b	cpu_resume
538ENDPROC(omap3_restore)
539
540	.ltorg
541
542/*
543 * Local variables
544 */
545pm_prepwstst_core_p:
546	.word	PM_PREPWSTST_CORE_P
547pm_pwstctrl_mpu:
548	.word	PM_PWSTCTRL_MPU_P
549scratchpad_base:
550	.word	SCRATCHPAD_BASE_P
551sram_base:
552	.word	SRAM_BASE_P + 0x8000
553control_stat:
554	.word	CONTROL_STAT
555control_mem_rta:
556	.word	CONTROL_MEM_RTA_CTRL
557l2dis_3630:
558	.word	0
559
560/*
561 * Internal functions
562 */
563
564/*
565 * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0
566 * Copied to and run from SRAM in order to reconfigure the SDRC parameters.
567 */
568	.text
569	.align	3
570ENTRY(es3_sdrc_fix)
571	ldr	r4, sdrc_syscfg		@ get config addr
572	ldr	r5, [r4]		@ get value
573	tst	r5, #0x100		@ is part access blocked
574	it	eq
575	biceq	r5, r5, #0x100		@ clear bit if set
576	str	r5, [r4]		@ write back change
577	ldr	r4, sdrc_mr_0		@ get config addr
578	ldr	r5, [r4]		@ get value
579	str	r5, [r4]		@ write back change
580	ldr	r4, sdrc_emr2_0		@ get config addr
581	ldr	r5, [r4]		@ get value
582	str	r5, [r4]		@ write back change
583	ldr	r4, sdrc_manual_0	@ get config addr
584	mov	r5, #0x2		@ autorefresh command
585	str	r5, [r4]		@ kick off refreshes
586	ldr	r4, sdrc_mr_1		@ get config addr
587	ldr	r5, [r4]		@ get value
588	str	r5, [r4]		@ write back change
589	ldr	r4, sdrc_emr2_1		@ get config addr
590	ldr	r5, [r4]		@ get value
591	str	r5, [r4]		@ write back change
592	ldr	r4, sdrc_manual_1	@ get config addr
593	mov	r5, #0x2		@ autorefresh command
594	str	r5, [r4]		@ kick off refreshes
595	bx	lr
596
597/*
598 * Local variables
599 */
600	.align
601sdrc_syscfg:
602	.word	SDRC_SYSCONFIG_P
603sdrc_mr_0:
604	.word	SDRC_MR_0_P
605sdrc_emr2_0:
606	.word	SDRC_EMR2_0_P
607sdrc_manual_0:
608	.word	SDRC_MANUAL_0_P
609sdrc_mr_1:
610	.word	SDRC_MR_1_P
611sdrc_emr2_1:
612	.word	SDRC_EMR2_1_P
613sdrc_manual_1:
614	.word	SDRC_MANUAL_1_P
615ENDPROC(es3_sdrc_fix)
616ENTRY(es3_sdrc_fix_sz)
617	.word	. - es3_sdrc_fix
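
Throughout both listings, device registers are reached by loading an absolute
register address from a nearby .word literal with a PC-relative ldr and then
dereferencing it; keeping the address word next to the code is what lets the
same routine run from SDRAM or from its copy in SRAM. A minimal standalone
sketch of the idiom, where 0x6d000070 is only an assumed physical address for
the OMAP3 SDRC_POWER register, used here for illustration:

	.arm
	.text
read_sdrc_power:			@ return the SDRC_POWER contents in r0
	ldr	r0, sdrc_power_addr	@ PC-relative load of the address word
	ldr	r0, [r0]		@ dereference the device register
	bx	lr
sdrc_power_addr:
	.word	0x6d000070		@ assumed SDRC_POWER physical address
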
arch/arm/mach-omap2/sleep34xx.S (Linux v6.2)
  1/* SPDX-License-Identifier: GPL-2.0-or-later */
  2/*
  3 * (C) Copyright 2007
  4 * Texas Instruments
  5 * Karthik Dasu <karthik-dp@ti.com>
  6 *
  7 * (C) Copyright 2004
  8 * Texas Instruments, <www.ti.com>
  9 * Richard Woodruff <r-woodruff2@ti.com>
 10 */
 11#include <linux/linkage.h>
 12
 13#include <asm/assembler.h>
 14
 15#include "omap34xx.h"
 16#include "iomap.h"
 17#include "cm3xxx.h"
 18#include "prm3xxx.h"
 19#include "sdrc.h"
 20#include "sram.h"
 21#include "control.h"
 22
 23/*
 24 * Registers access definitions
 25 */
 26#define SDRC_SCRATCHPAD_SEM_OFFS	0xc
 27#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
 28					(SDRC_SCRATCHPAD_SEM_OFFS)
 29#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
 30					OMAP3430_PM_PREPWSTST
 31#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
 32#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
 33#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
 34#define SRAM_BASE_P		OMAP3_SRAM_PA
 35#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
 36#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
 37					OMAP36XX_CONTROL_MEM_RTA_CTRL)
 38
 39/* Move this once a correct place is available */
 40#define SCRATCHPAD_MEM_OFFS	0x310
 41#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
 42					OMAP343X_CONTROL_MEM_WKUP +\
 43					SCRATCHPAD_MEM_OFFS)
 44#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
 45#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
 46#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
 47#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
 48#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
 49#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
 50#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
 51#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
 52#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
 53#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
 54
 55/*
 56 * This file needs to be built unconditionally as ARM to interoperate correctly
 57 * with non-Thumb-2-capable firmware.
 58 */
 59	.arm
 60
 61/*
 62 * API functions
 63 */
 64
 65	.text
 66/*
 67 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 68 * This function sets up a flag that will allow for this toggling to take
 69 * place on 3630. Hopefully some version in the future may not need this.
 70 */
 71ENTRY(enable_omap3630_toggle_l2_on_restore)
 72	stmfd	sp!, {lr}	@ save registers on stack
 73	/* Setup so that we will disable and enable l2 */
 74	mov	r1, #0x1
 75	adr	r3, l2dis_3630_offset
 76	ldr	r2, [r3]		@ value for offset
 77	str	r1, [r2, r3]		@ write to l2dis_3630
 78	ldmfd	sp!, {pc}	@ restore regs and return
 79ENDPROC(enable_omap3630_toggle_l2_on_restore)
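/*
 * Sketch of the offset-based addressing used above (and again in
 * omap3_restore/logic_l1_restore below): a word next to the code holds
 * "target - .", so adding that word's contents to its own address yields the
 * address of the target without keeping an absolute pointer in .text.
 * Hypothetical, self-contained example:
 *
 *	read_flag:			@ return the flag value in r0
 *		adr	r1, flag_offset	@ r1 = address of the offset word
 *		ldr	r0, [r1]	@ r0 = flag - flag_offset
 *		ldr	r0, [r1, r0]	@ r0 = *(flag_offset + r0) = flag
 *		bx	lr
 *	flag_offset:
 *		.long	flag - .
 *
 *		.data
 *		.align	2
 *	flag:
 *		.word	0
 */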
 80
 81/*
 82 * Function to call rom code to save secure ram context.
 83 *
 84 * r0 = physical address of the parameters
 85 */
 86	.arch armv7-a
 87	.arch_extension sec
 88ENTRY(save_secure_ram_context)
 89	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
 90	mov	r3, r0			@ physical address of parameters
 91	mov	r0, #25			@ set service ID for PPA
 92	mov	r12, r0			@ copy secure service ID in r12
 93	mov	r1, #0			@ set task id for ROM code in r1
 94	mov	r2, #4			@ set some flags in r2, r6
 95	mov	r6, #0xff
 96	dsb				@ data write barrier
 97	dmb				@ data memory barrier
 98	smc	#1			@ call SMI monitor (smi #1)
 99	nop
100	nop
101	nop
102	nop
103	ldmfd	sp!, {r4 - r11, pc}
104ENDPROC(save_secure_ram_context)
105
106/*
107 * ======================
108 * == Idle entry point ==
109 * ======================
110 */
111
112/*
113 * Forces OMAP into idle state
114 *
115 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
116 * and executes the WFI instruction. Calling WFI effectively changes the
117 * power domain states to the desired target power states.
118 *
119 *
120 * Notes:
121 * - only the minimum set of functions gets copied to internal SRAM at boot
122 *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
123 *   pointers in SDRAM or SRAM are called depending on the desired low power
124 *   target state.
125 * - when the OMAP wakes up it continues at different execution points
126 *   depending on the low power mode (non-OFF vs OFF modes),
127 *   cf. 'Resume path for xxx mode' comments.
128 */
129	.align	3
130ENTRY(omap34xx_cpu_suspend)
131	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
132
133	/*
134	 * r0 contains information about saving context:
135	 *   0 - No context lost
136	 *   1 - Only L1 and logic lost
137	 *   2 - Only L2 lost (Even if L1 is retained, we clean it along with L2)
138	 *   3 - Both L1 and L2 lost and logic lost
139	 */
140
141	/*
142	 * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
143	 * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
144	 */
145	ldr	r4, omap3_do_wfi_sram_addr
146	ldr	r5, [r4]
147	cmp	r0, #0x0		@ If no context save required,
148	bxeq	r5			@  jump to the WFI code in SRAM
149
150
151	/* Otherwise fall through to the save context code */
152save_context_wfi:
153	/*
154	 * jump out to kernel flush routine
155	 *  - reusing that code is better
156	 *  - it executes in a cached space so is faster than refetch per-block
157	 *  - should be faster and will change with the kernel
158	 *  - 'might' have to copy address, load and jump to it
159	 * Flush all data from the L1 data cache before disabling
160	 * SCTLR.C bit.
161	 */
162	ldr	r1, kernel_flush
163	mov	lr, pc
164	bx	r1
165
166	/*
167	 * Clear the SCTLR.C bit to prevent further data cache
168	 * allocation. Clearing SCTLR.C makes all data accesses
169	 * strongly ordered, so they no longer hit the cache.
170	 */
171	mrc	p15, 0, r0, c1, c0, 0
172	bic	r0, r0, #(1 << 2)	@ Disable the C bit
173	mcr	p15, 0, r0, c1, c0, 0
174	isb
175
176	/*
177	 * Invalidate the L1 data cache. Even though only invalidation is
178	 * necessary, the exported flush API is used here. Doing a clean
179	 * on an already clean cache is almost a NOP.
180	 */
181	ldr	r1, kernel_flush
182	blx	r1
183	b	omap3_do_wfi
184ENDPROC(omap34xx_cpu_suspend)
185omap3_do_wfi_sram_addr:
186	.word omap3_do_wfi_sram
187kernel_flush:
188	.word v7_flush_dcache_all
189
190/* ===================================
191 * == WFI instruction => Enter idle ==
192 * ===================================
193 */
194
195/*
196 * Do WFI instruction
197 * Includes the resume path for non-OFF modes
198 *
199 * This code gets copied to internal SRAM and is accessible
200 * from both SDRAM and SRAM:
201 * - executed from SRAM for non-off modes (omap3_do_wfi_sram),
202 * - executed from SDRAM for OFF mode (omap3_do_wfi).
203 */
204	.align	3
205ENTRY(omap3_do_wfi)
206	ldr	r4, sdrc_power		@ read the SDRC_POWER register
207	ldr	r5, [r4]		@ read the contents of SDRC_POWER
208	orr	r5, r5, #0x40		@ enable self refresh on idle req
209	str	r5, [r4]		@ write back to SDRC_POWER register
210
211	/* Data memory barrier and Data sync barrier */
212	dsb
213	dmb
214
215/*
216 * ===================================
217 * == WFI instruction => Enter idle ==
218 * ===================================
219 */
220	wfi				@ wait for interrupt
221
222/*
223 * ===================================
224 * == Resume path for non-OFF modes ==
225 * ===================================
226 */
227	nop
228	nop
229	nop
230	nop
231	nop
232	nop
233	nop
234	nop
235	nop
236	nop
237
238/*
239 * This function implements the erratum ID i581 WA:
240 *  SDRC state restore before accessing the SDRAM
241 *
242 * Only used at return from non-OFF mode. For OFF
243 * mode the ROM code configures the SDRC and
244 * the DPLL before calling the restore code directly
245 * from DDR.
246 */
247
248/* Make sure SDRC accesses are ok */
249wait_sdrc_ok:
250
251/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
252	ldr	r4, cm_idlest_ckgen
253wait_dpll3_lock:
254	ldr	r5, [r4]
255	tst	r5, #1
256	beq	wait_dpll3_lock
257
258	ldr	r4, cm_idlest1_core
259wait_sdrc_ready:
260	ldr	r5, [r4]
261	tst	r5, #0x2
262	bne	wait_sdrc_ready
263	/* allow DLL powerdown upon hw idle req */
264	ldr	r4, sdrc_power
265	ldr	r5, [r4]
266	bic	r5, r5, #0x40
267	str	r5, [r4]
268
269is_dll_in_lock_mode:
270	/* Is dll in lock mode? */
271	ldr	r4, sdrc_dlla_ctrl
272	ldr	r5, [r4]
273	tst	r5, #0x4
274	bne	exit_nonoff_modes	@ Return if locked
275	/* wait till dll locks */
276wait_dll_lock_timed:
277	ldr	r4, sdrc_dlla_status
278	/* Wait 20 us for lock */
279	mov	r6, #8
280wait_dll_lock:
281	subs	r6, r6, #0x1
282	beq	kick_dll
283	ldr	r5, [r4]
284	and	r5, r5, #0x4
285	cmp	r5, #0x4
286	bne	wait_dll_lock
287	b	exit_nonoff_modes	@ Return when locked
288
289	/* disable/reenable DLL if not locked */
290kick_dll:
291	ldr	r4, sdrc_dlla_ctrl
292	ldr	r5, [r4]
293	mov	r6, r5
294	bic	r6, #(1<<3)		@ disable dll
295	str	r6, [r4]
296	dsb
297	orr	r6, r6, #(1<<3)		@ enable dll
298	str	r6, [r4]
299	dsb
300	b	wait_dll_lock_timed
301
302exit_nonoff_modes:
303	/* Re-enable C-bit if needed */
304	mrc	p15, 0, r0, c1, c0, 0
305	tst	r0, #(1 << 2)		@ Check C bit enabled?
306	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
307	mcreq	p15, 0, r0, c1, c0, 0
308	isb
309
310/*
311 * ===================================
312 * == Exit point from non-OFF modes ==
313 * ===================================
314 */
315	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
316ENDPROC(omap3_do_wfi)
317sdrc_power:
318	.word	SDRC_POWER_V
319cm_idlest1_core:
320	.word	CM_IDLEST1_CORE_V
321cm_idlest_ckgen:
322	.word	CM_IDLEST_CKGEN_V
323sdrc_dlla_status:
324	.word	SDRC_DLLA_STATUS_V
325sdrc_dlla_ctrl:
326	.word	SDRC_DLLA_CTRL_V
327ENTRY(omap3_do_wfi_sz)
328	.word	. - omap3_do_wfi
329
330
331/*
332 * ==============================
333 * == Resume path for OFF mode ==
334 * ==============================
335 */
336
337/*
338 * The restore_* functions are called by the ROM code
339 *  when back from WFI in OFF mode.
340 * Cf. the get_*restore_pointer functions.
341 *
342 *  restore_es3: applies to 34xx >= ES3.0
343 *  restore_3630: applies to 36xx
344 *  restore: common code for 3xxx
345 *
346 * Note: when back from CORE and MPU OFF mode we are running
347 *  from SDRAM, with the MMU, caches and branch prediction disabled.
348 *  The SRAM content has also been cleared.
349 */
350ENTRY(omap3_restore_es3)
351	ldr	r5, pm_prepwstst_core_p
352	ldr	r4, [r5]
353	and	r4, r4, #0x3
354	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
355	bne	omap3_restore	@ Fall through to OMAP3 common code
356	adr	r0, es3_sdrc_fix
357	ldr	r1, sram_base
358	ldr	r2, es3_sdrc_fix_sz
359	mov	r2, r2, ror #2
360copy_to_sram:
361	ldmia	r0!, {r3}	@ val = *src
362	stmia	r1!, {r3}	@ *dst = val
363	subs	r2, r2, #0x1	@ num_words--
364	bne	copy_to_sram
365	ldr	r1, sram_base
366	blx	r1
367	b	omap3_restore	@ Fall through to OMAP3 common code
368ENDPROC(omap3_restore_es3)
369
370ENTRY(omap3_restore_3630)
371	ldr	r1, pm_prepwstst_core_p
372	ldr	r2, [r1]
373	and	r2, r2, #0x3
374	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
375	bne	omap3_restore	@ Fall through to OMAP3 common code
376	/* Disable RTA before giving control */
377	ldr	r1, control_mem_rta
378	mov	r2, #OMAP36XX_RTA_DISABLE
379	str	r2, [r1]
380ENDPROC(omap3_restore_3630)
381
382	/* Fall through to common code for the remaining logic */
383
384ENTRY(omap3_restore)
385	/*
386	 * Read the pwstctrl register to check the reason for mpu reset.
387	 * This tells us what was lost.
388	 */
389	ldr	r1, pm_pwstctrl_mpu
390	ldr	r2, [r1]
391	and	r2, r2, #0x3
392	cmp	r2, #0x0	@ Check if target power state was OFF or RET
393	bne	logic_l1_restore
394
395	adr	r1, l2dis_3630_offset	@ address for offset
396	ldr	r0, [r1]		@ value for offset
397	ldr	r0, [r1, r0]		@ value at l2dis_3630
398	cmp	r0, #0x1	@ should we disable L2 on 3630?
399	bne	skipl2dis
400	mrc	p15, 0, r0, c1, c0, 1
401	bic	r0, r0, #2	@ disable L2 cache
402	mcr	p15, 0, r0, c1, c0, 1
403skipl2dis:
404	ldr	r0, control_stat
405	ldr	r1, [r0]
406	and	r1, #0x700
407	cmp	r1, #0x300
408	beq	l2_inv_gp
409	adr	r0, l2_inv_api_params_offset
410	ldr	r3, [r0]
411	add	r3, r3, r0		@ r3 points to dummy parameters
412	mov	r0, #40			@ set service ID for PPA
413	mov	r12, r0			@ copy secure Service ID in r12
414	mov	r1, #0			@ set task id for ROM code in r1
415	mov	r2, #4			@ set some flags in r2, r6
416	mov	r6, #0xff
417	dsb				@ data write barrier
418	dmb				@ data memory barrier
419	smc	#1			@ call SMI monitor (smi #1)
420	/* Write to Aux control register to set some bits */
421	mov	r0, #42			@ set service ID for PPA
422	mov	r12, r0			@ copy secure Service ID in r12
423	mov	r1, #0			@ set task id for ROM code in r1
424	mov	r2, #4			@ set some flags in r2, r6
425	mov	r6, #0xff
426	ldr	r4, scratchpad_base
427	ldr	r3, [r4, #0xBC]		@ r3 points to parameters
428	dsb				@ data write barrier
429	dmb				@ data memory barrier
430	smc	#1			@ call SMI monitor (smi #1)
431
432#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
433	/* Restore L2 aux control register */
434					@ set service ID for PPA
435	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
436	mov	r12, r0			@ copy service ID in r12
437	mov	r1, #0			@ set task ID for ROM code in r1
438	mov	r2, #4			@ set some flags in r2, r6
439	mov	r6, #0xff
440	ldr	r4, scratchpad_base
441	ldr	r3, [r4, #0xBC]
442	adds	r3, r3, #8		@ r3 points to parameters
443	dsb				@ data write barrier
444	dmb				@ data memory barrier
445	smc	#1			@ call SMI monitor (smi #1)
446#endif
447	b	logic_l1_restore
448
449	.align
450l2_inv_api_params_offset:
451	.long	l2_inv_api_params - .
452l2_inv_gp:
453	/* Execute smi to invalidate L2 cache */
454	mov r12, #0x1			@ set up to invalidate L2
455	smc	#0			@ Call SMI monitor (smieq)
456	/* Write to Aux control register to set some bits */
457	ldr	r4, scratchpad_base
458	ldr	r3, [r4,#0xBC]
459	ldr	r0, [r3,#4]
460	mov	r12, #0x3
461	smc	#0			@ Call SMI monitor (smieq)
462	ldr	r4, scratchpad_base
463	ldr	r3, [r4,#0xBC]
464	ldr	r0, [r3,#12]
465	mov	r12, #0x2
466	smc	#0			@ Call SMI monitor (smieq)
467logic_l1_restore:
468	adr	r0, l2dis_3630_offset	@ address for offset
469	ldr	r1, [r0]		@ value for offset
470	ldr	r1, [r0, r1]		@ value at l2dis_3630
471	cmp	r1, #0x1		@ Test if L2 re-enable needed on 3630
472	bne	skipl2reen
473	mrc	p15, 0, r1, c1, c0, 1
474	orr	r1, r1, #2		@ re-enable L2 cache
475	mcr	p15, 0, r1, c1, c0, 1
476skipl2reen:
477
478	/* Now branch to the common CPU resume function */
479	b	cpu_resume
480ENDPROC(omap3_restore)
481
482	.ltorg
483
484/*
485 * Local variables
486 */
487pm_prepwstst_core_p:
488	.word	PM_PREPWSTST_CORE_P
489pm_pwstctrl_mpu:
490	.word	PM_PWSTCTRL_MPU_P
491scratchpad_base:
492	.word	SCRATCHPAD_BASE_P
493sram_base:
494	.word	SRAM_BASE_P + 0x8000
495control_stat:
496	.word	CONTROL_STAT
497control_mem_rta:
498	.word	CONTROL_MEM_RTA_CTRL
499l2dis_3630_offset:
500	.long	l2dis_3630 - .
501
502	.data
503	.align	2
504l2dis_3630:
505	.word	0
506
507	.data
508	.align	2
509l2_inv_api_params:
510	.word	0x1, 0x00
511
512/*
513 * Internal functions
514 */
515
516/*
517 * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0
518 * Copied to and run from SRAM in order to reconfigure the SDRC parameters.
519 */
520	.text
521	.align	3
522ENTRY(es3_sdrc_fix)
523	ldr	r4, sdrc_syscfg		@ get config addr
524	ldr	r5, [r4]		@ get value
525	tst	r5, #0x100		@ is part access blocked
526	it	eq
527	biceq	r5, r5, #0x100		@ clear bit if set
528	str	r5, [r4]		@ write back change
529	ldr	r4, sdrc_mr_0		@ get config addr
530	ldr	r5, [r4]		@ get value
531	str	r5, [r4]		@ write back change
532	ldr	r4, sdrc_emr2_0		@ get config addr
533	ldr	r5, [r4]		@ get value
534	str	r5, [r4]		@ write back change
535	ldr	r4, sdrc_manual_0	@ get config addr
536	mov	r5, #0x2		@ autorefresh command
537	str	r5, [r4]		@ kick off refreshes
538	ldr	r4, sdrc_mr_1		@ get config addr
539	ldr	r5, [r4]		@ get value
540	str	r5, [r4]		@ write back change
541	ldr	r4, sdrc_emr2_1		@ get config addr
542	ldr	r5, [r4]		@ get value
543	str	r5, [r4]		@ write back change
544	ldr	r4, sdrc_manual_1	@ get config addr
545	mov	r5, #0x2		@ autorefresh command
546	str	r5, [r4]		@ kick off refreshes
547	bx	lr
548
549/*
550 * Local variables
551 */
552	.align
553sdrc_syscfg:
554	.word	SDRC_SYSCONFIG_P
555sdrc_mr_0:
556	.word	SDRC_MR_0_P
557sdrc_emr2_0:
558	.word	SDRC_EMR2_0_P
559sdrc_manual_0:
560	.word	SDRC_MANUAL_0_P
561sdrc_mr_1:
562	.word	SDRC_MR_1_P
563sdrc_emr2_1:
564	.word	SDRC_EMR2_1_P
565sdrc_manual_1:
566	.word	SDRC_MANUAL_1_P
567ENDPROC(es3_sdrc_fix)
568ENTRY(es3_sdrc_fix_sz)
569	.word	. - es3_sdrc_fix
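
Both listings export the size of each relocatable routine with a trailing
".word . - start" (omap3_do_wfi_sz, es3_sdrc_fix_sz, and in v3.5.6 also
save_secure_ram_context_sz), and omap3_restore_es3 shows the matching
word-by-word copy loop. A minimal sketch of that copy-and-call pattern, with
hypothetical register conventions (r0 = source, r1 = destination, r2 = size in
bytes, assumed to be a multiple of 4):

	.arm
	.text
copy_and_call:
	stmfd	sp!, {r4, lr}
	mov	r3, r1			@ remember the destination entry point
1:	ldmia	r0!, {r4}		@ val = *src++
	stmia	r1!, {r4}		@ *dst++ = val
	subs	r2, r2, #4		@ bytes -= 4
	bne	1b
	blx	r3			@ run the relocated copy
	ldmfd	sp!, {r4, pc}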