/* (removed stray "Loading..." scraper artifact — not part of the source) */
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Low level suspend code for AM43XX SoCs
4 *
5 * Copyright (C) 2013-2018 Texas Instruments Incorporated - https://www.ti.com/
6 * Dave Gerlach, Vaibhav Bedia
7 */
8
9#include <linux/linkage.h>
10#include <linux/ti-emif-sram.h>
11#include <linux/platform_data/pm33xx.h>
12#include <asm/assembler.h>
13#include <asm/hardware/cache-l2x0.h>
14#include <asm/page.h>
15
16#include "cm33xx.h"
17#include "common.h"
18#include "iomap.h"
19#include "omap-secure.h"
20#include "omap44xx.h"
21#include "pm-asm-offsets.h"
22#include "prm33xx.h"
23#include "prcm43xx.h"
24
25/* replicated define because linux/bitops.h cannot be included in assembly */
26#define BIT(nr) (1 << (nr))
27
28#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED 0x00030000
29#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE 0x0003
30#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE 0x0002
31
32#define AM43XX_EMIF_POWEROFF_ENABLE 0x1
33#define AM43XX_EMIF_POWEROFF_DISABLE 0x0
34
35#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP 0x1
36#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO 0x3
37
38#define AM43XX_CM_BASE 0x44DF0000
39
40#define AM43XX_CM_REGADDR(inst, reg) \
41 AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))
42
43#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
44 AM43XX_CM_MPU_MPU_CDOFFS)
45#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
46 AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
47#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
48 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
49#define AM43XX_PRM_EMIF_CTRL_OFFSET 0x0030
50
51#define RTC_SECONDS_REG 0x0
52#define RTC_PMIC_REG 0x98
53#define RTC_PMIC_POWER_EN BIT(16)
54#define RTC_PMIC_EXT_WAKEUP_STS BIT(12)
55#define RTC_PMIC_EXT_WAKEUP_POL BIT(4)
56#define RTC_PMIC_EXT_WAKEUP_EN BIT(0)
57
58 .arm
59 .arch armv7-a
60 .arch_extension sec
61 .align 3
62
/*
 * am43xx_do_wfi - put the SoC into a low power state via WFI.
 *
 * In:  r0 = wfi_flags bitmask (WFI_FLAG_*) selecting which steps to run:
 *      cache flush, EMIF self-refresh/context save, RTC-only power-off,
 *      WKUP_M3 handshake.
 * Out: r0 = 1 only on the abort path (a late interrupt terminated the WFI
 *      before power was cut); on a real suspend, execution resumes at
 *      am43xx_resume_from_deep_sleep instead.
 * Intended to run from SRAM (exported through the am43xx_pm_sram table),
 * since DDR becomes unavailable while EMIF is disabled.
 */
ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * Save wfi_flags arg to the SRAM data area so it can be reloaded
	 * after the cache maintenance below clobbers r4.
	 */
	mov	r4, r0
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	str	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0			@ r8 = PL310 virtual base, live until l2x_sync
#endif

	/* Only flush cache if we know we are losing MPU context */
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_flush

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Invalidate L1 and L2 data cache.
	 * (Second pass catches lines allocated between the first flush
	 * and the SCTLR.C clear.)
	 */
	ldr	r1, kernel_flush
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
#ifdef CONFIG_PL310_ERRATA_727915
	/* Set PL310 debug-ctrl via secure monitor around background op */
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	/* Save L2 AUX_CTRL/PREFETCH_CTRL so resume can restore them via SMC */
	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	/* Clean+invalidate all ways, then poll until the way mask clears */
	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync:
	/* Drain the PL310: write CACHE_SYNC and poll until it reads 0 */
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

	/* Restore wfi_flags (r4 was reused as a pointer above) */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	ldr	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

cache_skip_flush:
	/*
	 * If we are trying to enter RTC+DDR mode we must perform
	 * a read from the rtc address space to ensure translation
	 * presence in the TLB to avoid page table walk after DDR
	 * is unavailable.
	 */
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_va_refresh

	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]
	ldr	r0, [r1]			@ dummy read to prime the TLB entry

skip_rtc_va_refresh:
	/* Check if we want self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_enter_sr

	adr	r9, am43xx_emif_sram_table	@ r9 = EMIF PM function table, live to end

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

emif_skip_enter_sr:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SAVE_EMIF
	beq	emif_skip_save

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

emif_skip_save:
	/* Only can disable EMIF if we have entered self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_disable

	/* Disable EMIF (clear MODULEMODE bits in its CLKCTRL) */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	/* Poll until IDLEST reports the module fully disabled */
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

emif_skip_disable:
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_only

	/*
	 * RTC-only mode: arm the RTC PMIC power-enable and external-wakeup
	 * bits, then spin on the RTC seconds register waiting for power
	 * to actually drop.
	 */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]

	ldr	r0, [r1, #RTC_PMIC_REG]
	orr	r0, r0, #RTC_PMIC_POWER_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_STS
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_POL
	str	r0, [r1, #RTC_PMIC_REG]
	ldr	r0, [r1, #RTC_PMIC_REG]	@ read back to post the write
	/* Wait for 2 seconds to lose power */
	mov	r3, #2
	ldr	r2, [r1, #RTC_SECONDS_REG]
rtc_loop:
	ldr	r0, [r1, #RTC_SECONDS_REG]
	cmp	r0, r2
	beq	rtc_loop
	mov	r2, r0
	subs	r3, r3, #1
	bne	rtc_loop

	/* Still powered after 2s: treat as failure and restore EMIF */
	b	re_enable_emif

skip_rtc_only:

	tst	r4, #WFI_FLAG_WAKE_M3
	beq	wkup_m3_skip

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

wkup_m3_skip:
	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. CPU can speculatively
	 * prefetch the instructions so add NOPs after WFI. Sixteen
	 * NOPs as per Cortex-A9 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

re_enable_emif:
	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_restore

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

cache_skip_restore:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_exit_sr_abt

	/* Abort self-refresh: we never actually lost context */
	adr	r9, am43xx_emif_sram_table
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

emif_skip_exit_sr_abt:
	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)
336
	.align
/*
 * Byte offset of the resume entry point from the start of am43xx_do_wfi,
 * so the PM core can compute the resume address inside the SRAM copy.
 */
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi
340
/*
 * am43xx_resume_from_deep_sleep - first code run when the MPU wakes
 * from deep sleep with context lost.
 *
 * Runs with the MMU off (physical addresses, hence the *_phys_* pointers
 * and the PHYS offset into am43xx_pm_ro_sram_data). Restores EMIF
 * context, takes DDR out of self-refresh, re-enables and invalidates
 * the L2 cache via secure monitor calls, then jumps to the generic
 * cpu_resume (physical address computed in resume_addr).
 */
ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

	/* r9 = EMIF PM function table (code copied into SRAM) */
	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	/* DDR is live again: drop the EMIF power-down override */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

	ldr	r1, [r9, #EMIF_PM_RUN_HW_LEVELING]
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/* Check the PL310 enable bit; skip re-init if already on */
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en			@ Skip if already enabled

	/* Restore saved PREFETCH_CTRL through the secure monitor */
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
set_aux_ctrl:
	/* Restore saved AUX_CTRL through the secure monitor */
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	/* Drain the PL310 before enabling it */
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	/* Enable the L2 cache controller via secure monitor */
	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)
434
/*
 * Local variables
 *
 * Literal pool of pointers/constants used above. Kept adjacent to the
 * code so `adr`/pc-relative `ldr` still work after the block is copied
 * into SRAM.
 */
	.align
kernel_flush:
	.word v7_flush_dcache_all	@ v7 helper: flush entire D-cache
ddr_start:
	.word PAGE_OFFSET		@ virtual start of kernel lowmem

am43xx_phys_emif_poweroff:
	.word (AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
	       AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word (AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word (AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word (AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word (AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
	       AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word omap4_get_l2cache_base
l2_cache_base:
	.word OMAP44XX_L2CACHE_BASE
l2_smc1:
	.word OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
	.word OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
	.word OMAP4_MON_L2X0_CTRL_INDEX
l2_val:
	.word 0xffff			@ way mask used for clean/inv-by-way
#endif

.align 3
/* DDR related defines */
/* Table of EMIF PM function pointers, filled in by the PM core */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

/* Descriptor handed to the PM core: code bounds + data table locations */
ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

resume_addr:
	/* Physical address of cpu_resume (virt - PAGE_OFFSET + DDR base) */
	.word cpu_resume - PAGE_OFFSET + 0x80000000
.align 3

/* Read-only data area (virt/phys pointers, saved L2 regs, wfi_flags) */
ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

/* Total size of the code to copy into SRAM */
ENTRY(am43xx_do_wfi_sz)
	.word . - am43xx_do_wfi
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Low level suspend code for AM43XX SoCs
4 *
5 * Copyright (C) 2013-2018 Texas Instruments Incorporated - http://www.ti.com/
6 * Dave Gerlach, Vaibhav Bedia
7 */
8
9#include <generated/ti-pm-asm-offsets.h>
10#include <linux/linkage.h>
11#include <linux/ti-emif-sram.h>
12
13#include <asm/assembler.h>
14#include <asm/hardware/cache-l2x0.h>
15#include <asm/memory.h>
16
17#include "cm33xx.h"
18#include "common.h"
19#include "iomap.h"
20#include "omap-secure.h"
21#include "omap44xx.h"
22#include "prm33xx.h"
23#include "prcm43xx.h"
24
25#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED 0x00030000
26#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE 0x0003
27#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE 0x0002
28
29#define AM43XX_EMIF_POWEROFF_ENABLE 0x1
30#define AM43XX_EMIF_POWEROFF_DISABLE 0x0
31
32#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP 0x1
33#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO 0x3
34
35#define AM43XX_CM_BASE 0x44DF0000
36
37#define AM43XX_CM_REGADDR(inst, reg) \
38 AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))
39
40#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
41 AM43XX_CM_MPU_MPU_CDOFFS)
42#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
43 AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
44#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
45 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
46#define AM43XX_PRM_EMIF_CTRL_OFFSET 0x0030
47
48 .arm
49 .align 3
50
/*
 * am43xx_do_wfi - put the SoC into deep sleep via WFI.
 *
 * Unconditionally flushes caches, puts DDR into self-refresh, saves
 * EMIF context, disables the EMIF and MPU clock modules, then executes
 * WFI. Returns r0 = 1 only on the abort path (a late interrupt
 * terminated the WFI); on a real suspend, execution resumes at
 * am43xx_resume_from_deep_sleep. Intended to run from SRAM (exported
 * through the am43xx_pm_sram table) since DDR is unavailable while
 * EMIF is off.
 */
ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0			@ r8 = PL310 virtual base, live until l2x_sync
#endif

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Invalidate L1 and L2 data cache.
	 * (Second pass catches lines allocated between the first flush
	 * and the SCTLR.C clear.)
	 */
	ldr	r1, kernel_flush
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
#ifdef CONFIG_PL310_ERRATA_727915
	/* Set PL310 debug-ctrl via secure monitor around background op */
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	/* Save L2 AUX_CTRL/PREFETCH_CTRL so resume can restore them via SMC */
	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	/* Clean+invalidate all ways, then poll until the way mask clears */
	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync:
	/* Drain the PL310: write CACHE_SYNC and poll until it reads 0 */
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

	/* r9 = EMIF PM function table, live until the abort path below */
	adr	r9, am43xx_emif_sram_table

	/* Put DDR into self-refresh */
	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

	/* Save EMIF register context (PER domain loses context) */
	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

	/* Disable EMIF (clear MODULEMODE bits in its CLKCTRL) */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	/* Poll until IDLEST reports the module fully disabled */
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. CPU can speculatively
	 * prefetch the instructions so add NOPs after WFI. Sixteen
	 * NOPs as per Cortex-A9 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/* Abort self-refresh: we never actually lost context */
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)
236
	.align
/*
 * Byte offset of the resume entry point from the start of am43xx_do_wfi,
 * so the PM core can compute the resume address inside the SRAM copy.
 */
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi
240
/*
 * am43xx_resume_from_deep_sleep - first code run when the MPU wakes
 * from deep sleep with context lost.
 *
 * Runs with the MMU off (physical addresses, hence the *_phys_* pointers
 * and the PHYS offset into am43xx_pm_ro_sram_data). Restores EMIF
 * context, takes DDR out of self-refresh, re-enables and invalidates
 * the L2 cache via secure monitor calls, then jumps to the generic
 * cpu_resume (physical address computed in resume_addr).
 */
ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

	/* r9 = EMIF PM function table (code copied into SRAM) */
	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	/* DDR is live again: drop the EMIF power-down override */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

#ifdef CONFIG_CACHE_L2X0
	/* Check the PL310 enable bit; skip re-init if already on */
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en			@ Skip if already enabled

	/* Restore saved PREFETCH_CTRL through the secure monitor */
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
set_aux_ctrl:
	/* Restore saved AUX_CTRL through the secure monitor */
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	/* Drain the PL310 before enabling it */
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	/* Enable the L2 cache controller via secure monitor */
	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)
331
/*
 * Local variables
 *
 * Literal pool of pointers/constants used above. Kept adjacent to the
 * code so `adr`/pc-relative `ldr` still work after the block is copied
 * into SRAM.
 */
	.align
resume_addr:
	/* Physical address of cpu_resume (virt - PAGE_OFFSET + DDR base) */
	.word cpu_resume - PAGE_OFFSET + 0x80000000
kernel_flush:
	.word v7_flush_dcache_all	@ v7 helper: flush entire D-cache
ddr_start:
	.word PAGE_OFFSET		@ virtual start of kernel lowmem

am43xx_phys_emif_poweroff:
	.word (AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
	       AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word (AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word (AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word (AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word (AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
	       AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word omap4_get_l2cache_base
l2_cache_base:
	.word OMAP44XX_L2CACHE_BASE
l2_smc1:
	.word OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
	.word OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
	.word OMAP4_MON_L2X0_CTRL_INDEX
l2_val:
	.word 0xffff			@ way mask used for clean/inv-by-way
#endif

.align 3
/* DDR related defines */
/* Table of EMIF PM function pointers, filled in by the PM core */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

/* Descriptor handed to the PM core: code bounds + data table locations */
ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

.align 3

/* Read-only data area (virt/phys pointers, saved L2 register values) */
ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

/* Total size of the code to copy into SRAM */
ENTRY(am43xx_do_wfi_sz)
	.word . - am43xx_do_wfi