v3.15
/*
 * arch/sh/kernel/cpu/sh4a/sleep-sh_mobile.S
 *
 * Sleep mode and Standby modes support for SuperH Mobile
 *
 *  Copyright (C) 2009 Magnus Damm
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */

#include <linux/sys.h>
#include <linux/errno.h>
#include <linux/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/suspend.h>

/*
 * Kernel mode register usage, see entry.S:
 *	k0	scratch
 *	k1	scratch
 */
#define k0	r0
#define k1	r1

/* manage self-refresh and enter standby mode. must be self-contained.
 * this code will be copied to on-chip memory and executed from there.
 */
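As the comment above says, everything between the sh_mobile_sleep_enter_start and sh_mobile_sleep_enter_end markers is copied to on-chip memory and executed from there, so it keeps running while SDRAM is managed for self-refresh. A minimal C sketch of how a caller could perform that copy, assuming it has already mapped a suitable on-chip RAM window (the onchip_mem pointer and the helper name are hypothetical; only the start/end symbols come from this file):

#include <linux/string.h>

/* Markers around the routine below; declared here for the sketch. */
extern char sh_mobile_sleep_enter_start;
extern char sh_mobile_sleep_enter_end;

/* Hypothetical helper: copy the self-contained sleep routine into
 * on-chip RAM before it is used. */
static void copy_sleep_code(void *onchip_mem)
{
	unsigned long size = &sh_mobile_sleep_enter_end -
			     &sh_mobile_sleep_enter_start;

	memcpy(onchip_mem, &sh_mobile_sleep_enter_start, size);
}
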
	.balign 4
ENTRY(sh_mobile_sleep_enter_start)

	/* save mode flags */
	mov.l	r4, @(SH_SLEEP_MODE, r5)

	/* save original vbr */
	stc	vbr, r0
	mov.l	r0, @(SH_SLEEP_VBR, r5)

	/* point vbr to our on-chip memory page */
	ldc	r5, vbr

	/* save return address */
	sts	pr, r0
	mov.l	r0, @(SH_SLEEP_SPC, r5)

	/* save sr */
	stc	sr, r0
	mov.l	r0, @(SH_SLEEP_SR, r5)

	/* save general purpose registers to stack if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_REGS, r0
	bt	skip_regs_save

	sts.l	pr, @-r15
	mov.l	r14, @-r15
	mov.l	r13, @-r15
	mov.l	r12, @-r15
	mov.l	r11, @-r15
	mov.l	r10, @-r15
	mov.l	r9, @-r15
	mov.l	r8, @-r15

	/* make sure bank0 is selected, save low registers */
	mov.l	rb_bit, r9
	not	r9, r9
	bsr	set_sr
	 mov	#0, r10

	bsr	save_low_regs
	 nop

	/* switch to bank 1, save low registers */
	mov.l	rb_bit, r10
	bsr	set_sr
	 mov	#-1, r9

	bsr	save_low_regs
	 nop

	/* switch back to bank 0 */
	mov.l	rb_bit, r9
	not	r9, r9
	bsr	set_sr
	 mov	#0, r10

skip_regs_save:

	/* save sp, also set to internal ram */
	mov.l	r15, @(SH_SLEEP_SP, r5)
	mov	r5, r15

	/* save stbcr */
	bsr	save_register
	 mov	#SH_SLEEP_REG_STBCR, r0

	/* save mmu and cache context if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_MMU, r0
	bt	skip_mmu_save_disable

	/* save mmu state */
	bsr	save_register
	 mov	#SH_SLEEP_REG_PTEH, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_PTEL, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_TTB, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_TEA, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_MMUCR, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_PTEA, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_PASCR, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_IRMCR, r0

	/* invalidate TLBs and disable the MMU */
	bsr	get_register
	 mov	#SH_SLEEP_REG_MMUCR, r0
	mov	#4, r1
	mov.l	r1, @r0
	icbi	@r0

	/* save cache registers and disable caches */
	bsr	save_register
	 mov	#SH_SLEEP_REG_CCR, r0

	bsr	save_register
	 mov	#SH_SLEEP_REG_RAMCR, r0

	bsr	get_register
	 mov	#SH_SLEEP_REG_CCR, r0
	mov	#0, r1
	mov.l	r1, @r0
	icbi	@r0

skip_mmu_save_disable:
	/* call self-refresh entering code if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_SF, r0
	bt	skip_set_sf

	mov.l	@(SH_SLEEP_SF_PRE, r5), r0
	jsr	@r0
	 nop

skip_set_sf:
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_STANDBY, r0
	bt	test_rstandby

	/* set mode to "software standby mode" */
	bra	do_sleep
	 mov	#0x80, r1

test_rstandby:
	tst	#SUSP_SH_RSTANDBY, r0
	bt	test_ustandby

	/* setup BAR register */
	bsr	get_register
	 mov	#SH_SLEEP_REG_BAR, r0
	mov.l	@(SH_SLEEP_RESUME, r5), r1
	mov.l	r1, @r0

	/* set mode to "r-standby mode" */
	bra	do_sleep
	 mov	#0x20, r1

test_ustandby:
	tst	#SUSP_SH_USTANDBY, r0
	bt	force_sleep

	/* set mode to "u-standby mode" */
	bra	do_sleep
	 mov	#0x10, r1

force_sleep:

	/* set mode to "sleep mode" */
	mov	#0x00, r1

do_sleep:
	/* setup and enter selected standby mode */
	bsr	get_register
	 mov	#SH_SLEEP_REG_STBCR, r0
	mov.l	r1, @r0
again:
	sleep
	bra	again
	 nop

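The branch chain above boils down to a flag-to-STBCR-value mapping before the final sleep instruction. A hedged C restatement of that decision, for readability only (the SUSP_SH_* flags are the ones tested above via <asm/suspend.h>; the function itself is illustrative and not part of the kernel):

#include <asm/suspend.h>

/* Illustrative restatement of the mode selection above; the real code
 * writes the chosen value through the saved STBCR address. */
static unsigned char stbcr_value_for(unsigned long mode)
{
	if (mode & SUSP_SH_STANDBY)
		return 0x80;	/* software standby mode */
	if (mode & SUSP_SH_RSTANDBY)
		return 0x20;	/* R-standby mode; BAR gets the resume address */
	if (mode & SUSP_SH_USTANDBY)
		return 0x10;	/* U-standby mode */
	return 0x00;		/* plain sleep mode */
}
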
save_register:
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r1
	add	#-SH_SLEEP_BASE_ADDR, r0
	mov.l	@r1, r1
	add	#SH_SLEEP_BASE_DATA, r0
	mov.l	r1, @(r0, r5)
	add	#-SH_SLEEP_BASE_DATA, r0
	rts
	 nop

get_register:
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r0
	rts
	 nop

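save_register and get_register treat the on-chip data page (addressed via r5) as two parallel tables keyed by the same SH_SLEEP_REG_* offset: the SH_SLEEP_BASE_ADDR area holds each control register's address, filled in by platform code, and the SH_SLEEP_BASE_DATA area receives the value read back from it. A rough C model of save_register, purely for illustration (the struct, its field names and NR_SLEEP_REGS are invented, and array indexing stands in for the byte offsets produced by asm-offsets):

/* Invented model of the data page addressed via r5 in the code above. */
#define NR_SLEEP_REGS	16	/* arbitrary size for the sketch */

struct sleep_data_page {
	/* ... mode, vbr, spc, sr, sp and other saved state ... */
	unsigned long reg_addr[NR_SLEEP_REGS];	/* SH_SLEEP_BASE_ADDR area */
	unsigned long reg_data[NR_SLEEP_REGS];	/* SH_SLEEP_BASE_DATA area */
};

/* What save_register does: read the register whose address was stored
 * at reg_addr[idx] and remember the value at reg_data[idx]. */
static void save_register(struct sleep_data_page *page, int idx)
{
	page->reg_data[idx] = *(volatile unsigned long *)page->reg_addr[idx];
}
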
set_sr:
	stc	sr, r8
	and	r9, r8
	or	r10, r8
	ldc	r8, sr
	rts
	 nop

save_low_regs:
	mov.l	r7, @-r15
	mov.l	r6, @-r15
	mov.l	r5, @-r15
	mov.l	r4, @-r15
	mov.l	r3, @-r15
	mov.l	r2, @-r15
	mov.l	r1, @-r15
	rts
	 mov.l	r0, @-r15

	.balign 4
rb_bit:	.long	0x20000000 ! RB=1

ENTRY(sh_mobile_sleep_enter_end)

	.balign 4
ENTRY(sh_mobile_sleep_resume_start)

	/* figure out start address */
	bsr	0f
	 nop
0:
	sts	pr, k1
	mov.l	1f, k0
	and	k0, k1

	/* store pointer to data area in VBR */
	ldc	k1, vbr

	/* setup sr with saved sr */
	mov.l	@(SH_SLEEP_SR, k1), k0
	ldc	k0, sr

	/* now: user register set! */
	stc	vbr, r5

	/* setup spc with return address to c code */
	mov.l	@(SH_SLEEP_SPC, r5), r0
	ldc	r0, spc

	/* restore vbr */
	mov.l	@(SH_SLEEP_VBR, r5), r0
	ldc	r0, vbr

	/* setup ssr with saved sr */
	mov.l	@(SH_SLEEP_SR, r5), r0
	ldc	r0, ssr

	/* restore sp */
	mov.l	@(SH_SLEEP_SP, r5), r15

	/* restore sleep mode register */
	bsr	restore_register
	 mov	#SH_SLEEP_REG_STBCR, r0

	/* call self-refresh resume code if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_SF, r0
	bt	skip_restore_sf

	mov.l	@(SH_SLEEP_SF_POST, r5), r0
	jsr	@r0
	 nop

skip_restore_sf:
	/* restore mmu and cache state if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_MMU, r0
	bt	skip_restore_mmu

	/* restore mmu state */
	bsr	restore_register
	 mov	#SH_SLEEP_REG_PTEH, r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_PTEL, r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_TTB, r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_TEA, r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_PTEA, r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_PASCR, r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_IRMCR, r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_MMUCR, r0
	icbi	@r0

	/* restore cache settings */
	bsr	restore_register
	 mov	#SH_SLEEP_REG_RAMCR, r0
	icbi	@r0

	bsr	restore_register
	 mov	#SH_SLEEP_REG_CCR, r0
	icbi	@r0

skip_restore_mmu:

	/* restore general purpose registers if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_REGS, r0
	bt	skip_restore_regs

	/* switch to bank 1, restore low registers */
	mov.l	_rb_bit, r10
	bsr	_set_sr
	 mov	#-1, r9

	bsr	restore_low_regs
	 nop

	/* switch to bank0, restore low registers */
	mov.l	_rb_bit, r9
	not	r9, r9
	bsr	_set_sr
	 mov	#0, r10

	bsr	restore_low_regs
	 nop

	/* restore the rest of the registers */
	mov.l	@r15+, r8
	mov.l	@r15+, r9
	mov.l	@r15+, r10
	mov.l	@r15+, r11
	mov.l	@r15+, r12
	mov.l	@r15+, r13
	mov.l	@r15+, r14
	lds.l	@r15+, pr

skip_restore_regs:
	rte
	 nop

restore_register:
	add	#SH_SLEEP_BASE_DATA, r0
	mov.l	@(r0, r5), r1
	add	#-SH_SLEEP_BASE_DATA, r0
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r0
	mov.l	r1, @r0
	rts
	 nop

_set_sr:
	stc	sr, r8
	and	r9, r8
	or	r10, r8
	ldc	r8, sr
	rts
	 nop

restore_low_regs:
	mov.l	@r15+, r0
	mov.l	@r15+, r1
	mov.l	@r15+, r2
	mov.l	@r15+, r3
	mov.l	@r15+, r4
	mov.l	@r15+, r5
	mov.l	@r15+, r6
	rts
	 mov.l	@r15+, r7

	.balign 4
_rb_bit:	.long	0x20000000 ! RB=1
1:	.long	~0x7ff
ENTRY(sh_mobile_sleep_resume_end)
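
One detail worth spelling out: the resume stub locates its data page by taking the address that bsr 0f put into pr and masking it with the ~0x7ff constant stored at the local label 1:. That only yields the right base if the on-chip page is aligned to at least 2 KiB and the stub sits within its first 2 KiB. The same arithmetic in C, for illustration only:

/* Illustrative only: how the resume stub computes its data page base.
 * 'pc' stands for the return address captured via bsr/sts pr above. */
static unsigned long sleep_data_base(unsigned long pc)
{
	return pc & ~0x7ffUL;	/* clear the low 11 bits (2 KiB granularity) */
}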