/* -*- mode: asm -*-
 *
 * linux/arch/h8300/platform/h8300h/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 * David McCullough <davidm@snapgear.com>
 *
 */

/*
 * entry.S
 * contains the exception/interrupt gateways
 * and the system call entry
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
	.h8300h
	.macro SHLL2 reg
	shll.l	\reg
	shll.l	\reg
	.endm
	.macro SHLR2 reg
	shlr.l	\reg
	shlr.l	\reg
	.endm
	.macro SAVEREGS
	mov.l	er0,@-sp
	mov.l	er1,@-sp
	mov.l	er2,@-sp
	mov.l	er3,@-sp
	.endm
	.macro RESTOREREGS
	mov.l	@sp+,er3
	mov.l	@sp+,er2
	.endm
	.macro SAVEEXR
	.endm
	.macro RESTOREEXR
	.endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
	.h8300s
	.macro SHLL2 reg
	shll.l	#2,\reg
	.endm
	.macro SHLR2 reg
	shlr.l	#2,\reg
	.endm
	.macro SAVEREGS
	stm.l	er0-er3,@-sp
	.endm
	.macro RESTOREREGS
	ldm.l	@sp+,er2-er3
	.endm
	.macro SAVEEXR
	mov.w	@(USEREXR:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)	/* copy EXR */
	.endm
	.macro RESTOREEXR
	mov.w	@(LEXR-LER1:16,sp),r1	/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(USEREXR:16,er0)
	.endm
#endif
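
/*
 * The macros above hide the differences between the H8/300H and H8S
 * cores: the H8S has two-bit shift instructions and block transfers
 * (stm.l/ldm.l), and carries an extra EXR register that must be saved
 * and restored around exceptions.  USERRET (and USEREXR on the H8S)
 * are the offsets, relative to the saved user stack pointer, of the
 * hardware-pushed return address (and EXR) in the exception frame.
 */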


/* CPU context save/restore macros. */

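/*
 * SAVE_ALL builds the pt_regs frame on the kernel stack.  The H8/300
 * has no supervisor mode, so bit 4 of CCR (a user-definable flag) is
 * used to mark kernel context: it is set on kernel entry (orc #0x10)
 * and cleared on the way back to user space (andc #0xef).  sw_usp and
 * sw_ksp hold the saved user and kernel stack pointers while the other
 * stack is in use.  On exit from the macro, r1 holds the entry CCR so
 * callers can tell whether the trap came from user or kernel mode.
 */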
	.macro SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l			/* check kernel mode */
	btst	#4,r0l
	bne	5f

	/* user mode */
	mov.l	sp,@SYMBOL_NAME(sw_usp)
	mov.l	@sp,er0			/* restore saved er0 */
	orc	#0x10,ccr		/* switch kernel stack */
	mov.l	@SYMBOL_NAME(sw_ksp),sp
	sub.l	#(LRET-LORIG),sp	/* allocate LORIG - LRET */
	SAVEREGS
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@(USERRET:16,er0),er1	/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR

	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)	/* copy ER0 */
	mov.w	e1,r1			/* e1 high byte = ccr */
	and	#0xef,r1h		/* mask mode flag */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0			/* restore saved er0 */
	subs	#2,sp			/* set dummy ccr */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1	/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)	/* set ccr */
	mov.l	er6,@-sp		/* syscall arg #6 */
	mov.l	er5,@-sp		/* syscall arg #5 */
	mov.l	er4,@-sp		/* syscall arg #4 */
	.endm				/* r1 = ccr */

	.macro RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0	/* check kernel mode */
	btst	#4,r0l
	bne	7f

	orc	#0x80,ccr
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@(LER0-LER1:16,sp),er1	/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1	/* restore the RET addr */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)

	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp		/* remove LORIG - LRET */
	mov.l	sp,@SYMBOL_NAME(sw_ksp)
	andc	#0xef,ccr		/* switch to user mode */
	mov.l	er0,sp
	bra	8f
7:
	mov.l	@sp+,er1
	adds	#4,sp
	adds	#2,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp			/* remove the sw created LVEC */
	rte
	.endm

.globl SYMBOL_NAME(system_call)
.globl SYMBOL_NAME(ret_from_exception)
.globl SYMBOL_NAME(ret_from_fork)
.globl SYMBOL_NAME(ret_from_interrupt)
.globl SYMBOL_NAME(interrupt_redirect_table)
.globl SYMBOL_NAME(sw_ksp),SYMBOL_NAME(sw_usp)
.globl SYMBOL_NAME(resume)
.globl SYMBOL_NAME(interrupt_entry)
.globl SYMBOL_NAME(trace_break)

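/*
 * Interrupt redirect table.  On a ROM kernel the table itself sits in
 * the .int_redirect section as one 4-byte jsr/jmp stub per hardware
 * vector, so the CPU's vectors can point here permanently; on a RAM
 * kernel only a pointer to the table (set up elsewhere at run time) is
 * kept in .bss.  interrupt_entry later recovers the vector number from
 * the return address that a jsr stub pushed.
 */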
#if defined(CONFIG_ROMKERNEL)
	.section .int_redirect,"ax"
SYMBOL_NAME_LABEL(interrupt_redirect_table)
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@SYMBOL_NAME(trace_break)
	.long	0
#endif

	jsr	@SYMBOL_NAME(interrupt_entry)	/* NMI */
	jmp	@SYMBOL_NAME(system_call)	/* TRAPA #0 (System call) */
	.long	0
	.long	0
	jmp	@SYMBOL_NAME(trace_break)	/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@SYMBOL_NAME(interrupt_entry)
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
.globl SYMBOL_NAME(interrupt_redirect_table)
	.section .bss
SYMBOL_NAME_LABEL(interrupt_redirect_table)
	.space	4
#endif

	.section .text
	.align	2
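/*
 * interrupt_entry is reached via "jsr @interrupt_entry" stubs in the
 * redirect table.  The jsr pushes the address of the following stub,
 * which ends up in the frame as LVEC; subtracting the table base and
 * dividing by 4 (each stub is 4 bytes), minus one because the pushed
 * address points past the stub, yields the vector number for do_IRQ.
 */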
SYMBOL_NAME_LABEL(interrupt_entry)
	SAVE_ALL
	mov.l	sp,er0
	add.l	#LVEC,er0
	btst	#4,r1l
	bne	1f
	/* user LVEC */
	mov.l	@SYMBOL_NAME(sw_usp),er0
	adds	#4,er0
1:
	mov.l	@er0,er0		/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#SYMBOL_NAME(interrupt_redirect_table),er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@SYMBOL_NAME(interrupt_redirect_table),er1
	sub.l	er1,er0
#endif
	SHLR2	er0
	dec.l	#1,er0
	mov.l	sp,er1
	subs	#4,er1			/* adjust ret_pc */
	jsr	@SYMBOL_NAME(do_IRQ)
	jmp	@SYMBOL_NAME(ret_from_interrupt)

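/*
 * System call entry (TRAPA #0).  The syscall number arrives in er0 and
 * the first three arguments in er1-er3; SAVE_ALL has already pushed
 * er4-er6 as arguments 4-6.  After the optional ptrace hook, the
 * handler address is fetched from sys_call_table and called with the
 * first three saved arguments reloaded into er0-er2.
 */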
SYMBOL_NAME_LABEL(system_call)
	subs	#4,sp			/* dummy LVEC */
	SAVE_ALL
	andc	#0x7f,ccr
	mov.l	er0,er4

	/* save top of frame */
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	1f
	jsr	@SYMBOL_NAME(do_syscall_trace)
1:
	cmp.l	#NR_syscalls,er4
	bcc	badsys
	SHLL2	er4
	mov.l	#SYMBOL_NAME(sys_call_table),er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	SYMBOL_NAME(ret_from_exception):16
	mov.l	@(LER1:16,sp),er0
	mov.l	@(LER2:16,sp),er1
	mov.l	@(LER3:16,sp),er2
	jsr	@er4
	mov.l	er0,@(LER0:16,sp)	/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	2f
	jsr	@SYMBOL_NAME(do_syscall_trace)
2:
#if defined(CONFIG_SYSCALL_PRINT)
	jsr	@SYMBOL_NAME(syscall_print)
#endif
	orc	#0x80,ccr
	bra	resume_userspace

badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)
	bra	resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif

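/*
 * Return paths.  The kernel stack is assumed to be THREAD_SIZE (8KB)
 * aligned, so masking the stack pointer with 0xe000 yields the
 * thread_info at the base of the stack; TI_FLAGS is then checked for
 * pending work (rescheduling, signal delivery) before the registers
 * are restored.
 */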
SYMBOL_NAME_LABEL(ret_from_exception)
#if defined(CONFIG_PREEMPT)
	orc	#0x80,ccr
#endif
SYMBOL_NAME_LABEL(ret_from_interrupt)
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:8		/* return from kernel */
resume_userspace:
	andc	#0x7f,ccr
	mov.l	sp,er4
	and.w	#0xe000,r4		/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0			/* er0: pt_regs */
	jsr	@SYMBOL_NAME(do_notify_resume)
	bra	restore_all:8
work_resched:
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	jsr	@SYMBOL_NAME(schedule)
	bra	resume_userspace:8
restore_all:
	RESTORE_ALL			/* Does RTE */

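/*
 * Kernel preemption: only reschedule if the preempt count is zero,
 * TIF_NEED_RESCHED is set and interrupts were enabled in the
 * interrupted context.  PREEMPT_ACTIVE is set around the call to
 * schedule() so the preemption cannot recurse.
 */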
#if defined(CONFIG_PREEMPT)
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l	/* Interrupt Enabled? */
	bmi	restore_all:8
	mov.l	#PREEMPT_ACTIVE,er0
	mov.l	er0,@(TI_PRE_COUNT:16,er4)
	andc	#0x7f,ccr
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	jsr	@SYMBOL_NAME(schedule)
	orc	#0x80,ccr
	bra	need_resched:8
#endif

SYMBOL_NAME_LABEL(ret_from_fork)
	mov.l	er2,er0
	jsr	@SYMBOL_NAME(schedule_tail)
	jmp	@SYMBOL_NAME(ret_from_exception)

SYMBOL_NAME_LABEL(resume)
	/*
	 * Beware - when entering resume, prev (the current task) is
	 * in er0 and next (the new task) is in er1, so don't change
	 * these registers until their contents are no longer needed.
	 */

	/* save ccr */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)

	/* disable interrupts */
	orc	#0x80,ccr
	mov.l	@SYMBOL_NAME(sw_usp),er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)

	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */

	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@SYMBOL_NAME(sw_usp)
	mov.l	@(THREAD_KSP:16,er1),sp

	/* restore status register */
	mov.w	@(THREAD_CCR+2:16,er1),r3

	ldc	r3l,ccr
	rts

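/*
 * trace_break handles TRAPA #3 (breakpoint) traps.  LORIG is poisoned
 * with -1, and the saved user PC is backed up two bytes unless the
 * word before it is 0x5730 (the TRAPA #3 opcode itself), before
 * trace_trap() is called with the user PC as its argument.
 */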
SYMBOL_NAME_LABEL(trace_break)
	subs	#4,sp
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@er0,er1
	mov.w	@(-2:16,er1),r2
	cmp.w	#0x5730,r2
	beq	1f
	subs	#2,er1
	mov.l	er1,@er0
1:
	and.w	#0xff,e1
	mov.l	er1,er0
	jsr	@SYMBOL_NAME(trace_trap)
	jmp	@SYMBOL_NAME(ret_from_exception)

	.section .bss
SYMBOL_NAME_LABEL(sw_ksp)
	.space	4
SYMBOL_NAME_LABEL(sw_usp)
	.space	4

	.end

/* SPDX-License-Identifier: GPL-2.0 */
/*
 *
 * linux/arch/h8300/kernel/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 * David McCullough <davidm@snapgear.com>
 *
 */

/*
 * entry.S
 * contains the exception/interrupt gateways
 * and the system call entry
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
	.h8300h
	.macro SHLL2 reg
	shll.l	\reg
	shll.l	\reg
	.endm
	.macro SHLR2 reg
	shlr.l	\reg
	shlr.l	\reg
	.endm
	.macro SAVEREGS
	mov.l	er0,@-sp
	mov.l	er1,@-sp
	mov.l	er2,@-sp
	mov.l	er3,@-sp
	.endm
	.macro RESTOREREGS
	mov.l	@sp+,er3
	mov.l	@sp+,er2
	.endm
	.macro SAVEEXR
	.endm
	.macro RESTOREEXR
	.endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
	.h8300s
	.macro SHLL2 reg
	shll.l	#2,\reg
	.endm
	.macro SHLR2 reg
	shlr.l	#2,\reg
	.endm
	.macro SAVEREGS
	stm.l	er0-er3,@-sp
	.endm
	.macro RESTOREREGS
	ldm.l	@sp+,er2-er3
	.endm
	.macro SAVEEXR
	mov.w	@(USEREXR:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)	/* copy EXR */
	.endm
	.macro RESTOREEXR
	mov.w	@(LEXR-LER1:16,sp),r1	/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(USEREXR:16,er0)
	.endm
#endif


/* CPU context save/restore macros. */

	.macro SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l			/* check kernel mode */
	btst	#4,r0l
	bne	5f

	/* user mode */
	mov.l	sp,@_sw_usp
	mov.l	@sp,er0			/* restore saved er0 */
	orc	#0x10,ccr		/* switch kernel stack */
	mov.l	@_sw_ksp,sp
	sub.l	#(LRET-LORIG),sp	/* allocate LORIG - LRET */
	SAVEREGS
	mov.l	@_sw_usp,er0
	mov.l	@(USERRET:16,er0),er1	/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR

	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)	/* copy ER0 */
	mov.w	e1,r1			/* e1 high byte = ccr */
	and	#0xef,r1h		/* mask mode flag */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0			/* restore saved er0 */
	subs	#2,sp			/* set dummy ccr */
	subs	#4,sp			/* set dummy sp */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1	/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)	/* set ccr */
	mov.l	@_sw_usp,er2
	mov.l	er2,@(LSP-LER3:16,sp)	/* set usp */
	mov.l	er6,@-sp		/* syscall arg #6 */
	mov.l	er5,@-sp		/* syscall arg #5 */
	mov.l	er4,@-sp		/* syscall arg #4 */
	.endm				/* r1 = ccr */

	.macro RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0	/* check kernel mode */
	btst	#4,r0l
	bne	7f

	orc	#0xc0,ccr
	mov.l	@(LSP-LER1:16,sp),er0
	mov.l	@(LER0-LER1:16,sp),er1	/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1	/* restore the RET addr */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)

	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp		/* remove LORIG - LRET */
	mov.l	sp,@_sw_ksp
	andc	#0xef,ccr		/* switch to user mode */
	mov.l	er0,sp
	bra	8f
7:
	mov.l	@sp+,er1
	add.l	#10,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp			/* remove the sw created LVEC */
	rte
	.endm

.globl _system_call
.globl ret_from_exception
.globl ret_from_fork
.globl ret_from_kernel_thread
.globl ret_from_interrupt
.globl _interrupt_redirect_table
.globl _sw_ksp,_sw_usp
.globl _resume
.globl _interrupt_entry
.globl _trace_break
.globl _nmi

#if defined(CONFIG_ROMKERNEL)
	.section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@_trace_break
	.long	0
#endif

	jsr	@_interrupt_entry	/* NMI */
	jmp	@_system_call		/* TRAPA #0 (System call) */
	.long	0
#if defined(CONFIG_KGDB)
	jmp	@_kgdb_trap
#else
	.long	0
#endif
	jmp	@_trace_break		/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@_interrupt_entry
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
.globl _interrupt_redirect_table
	.section .bss
_interrupt_redirect_table:
	.space	4
#endif

	.section .text
	.align	2
_interrupt_entry:
	SAVE_ALL
/* r1l is saved ccr */
	mov.l	sp,er0
	add.l	#LVEC,er0
	btst	#4,r1l
	bne	1f
	/* user LVEC */
	mov.l	@_sw_usp,er0
	adds	#4,er0
1:
	mov.l	@er0,er0		/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@_interrupt_redirect_table,er1
	sub.l	er1,er0
#endif
	SHLR2	er0
	dec.l	#1,er0
	mov.l	sp,er1
	subs	#4,er1			/* adjust ret_pc */
#if defined(CONFIG_CPU_H8S)
	orc	#7,exr			/* mask all interrupts (EXR I2-I0 = 7) */
#endif
	jsr	@do_IRQ
	jmp	@ret_from_interrupt

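/*
 * System call entry (TRAPA #0).  As before, the syscall number is in
 * er0 and arguments 1-3 in er1-er3.  Syscall tracing now goes through
 * do_syscall_trace_enter/do_syscall_trace_leave, each called with a
 * pt_regs pointer in er0 and gated on _TIF_WORK_SYSCALL_MASK.
 */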
_system_call:
	subs	#4,sp			/* dummy LVEC */
	SAVE_ALL
	/* er0: syscall nr */
	andc	#0xbf,ccr
	mov.l	er0,er4

	/* save top of frame */
	mov.l	sp,er0
	jsr	@set_esp0
	andc	#0x3f,ccr
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	1f
	mov.l	sp,er0
	jsr	@do_syscall_trace_enter
1:
	cmp.l	#__NR_syscalls,er4
	bcc	badsys
	SHLL2	er4
	mov.l	#_sys_call_table,er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	ret_from_exception:16
	mov.l	@(LER1:16,sp),er0
	mov.l	@(LER2:16,sp),er1
	mov.l	@(LER3:16,sp),er2
	jsr	@er4
	mov.l	er0,@(LER0:16,sp)	/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	2f
	mov.l	sp,er0
	jsr	@do_syscall_trace_leave
2:
	orc	#0xc0,ccr
	bra	resume_userspace

badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)
	bra	resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif

ret_from_exception:
#if defined(CONFIG_PREEMPT)
	orc	#0xc0,ccr
#endif
ret_from_interrupt:
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:16	/* return from kernel */
resume_userspace:
	andc	#0xbf,ccr
	mov.l	sp,er4
	and.w	#0xe000,r4		/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0			/* er0: pt_regs */
	jsr	@do_notify_resume
	bra	resume_userspace:8
work_resched:
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@schedule
	bra	resume_userspace:8
restore_all:
	RESTORE_ALL			/* Does RTE */

#if defined(CONFIG_PREEMPT)
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l	/* Interrupt Enabled? */
	bmi	restore_all:8
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@preempt_schedule_irq
	bra	need_resched:8
#endif

ret_from_fork:
	mov.l	er2,er0
	jsr	@schedule_tail
	jmp	@ret_from_exception

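/*
 * Kernel threads come out of the context switch here: er4/er5 in the
 * saved frame hold the thread argument and function pointer (set up,
 * presumably, by copy_thread), so the function is called directly and
 * any return falls through to the usual exception exit.
 */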
ret_from_kernel_thread:
	mov.l	er2,er0
	jsr	@schedule_tail
	mov.l	@(LER4:16,sp),er0
	mov.l	@(LER5:16,sp),er1
	jsr	@er1
	jmp	@ret_from_exception

_resume:
	/*
	 * Beware - when entering resume, prev (the current task) is
	 * in er0 and next (the new task) is in er1, so don't change
	 * these registers until their contents are no longer needed.
	 */

	/* save ccr */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)

	/* disable interrupts */
	orc	#0xc0,ccr
	mov.l	@_sw_usp,er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)

	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */

	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@_sw_usp
	mov.l	@(THREAD_KSP:16,er1),sp

	/* restore status register */
	mov.w	@(THREAD_CCR+2:16,er1),r3

	ldc	r3l,ccr
	rts

_trace_break:
	subs	#4,sp
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)
	mov.l	sp,er0
	jsr	@set_esp0
	mov.l	@_sw_usp,er0
	mov.l	@er0,er1
	mov.w	@(-2:16,er1),r2
	cmp.w	#0x5730,r2
	beq	1f
	subs	#2,er1
	mov.l	er1,@er0
1:
	and.w	#0xff,e1
	mov.l	er1,er0
	jsr	@trace_trap
	jmp	@ret_from_exception

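/*
 * NMI entry.  The jsr stubs in the redirect table push a return
 * address from which interrupt_entry derives the vector number; an NMI
 * taken directly needs the same fixup, so this fabricates the value a
 * jsr from the NMI slot would have pushed (redirect table address plus
 * 8*4, i.e. just past entry 7) and stores it in the LVEC slot before
 * joining interrupt_entry.
 */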
_nmi:
	subs	#4, sp
	mov.l	er0, @-sp
	mov.l	@_interrupt_redirect_table, er0
	add.l	#8*4, er0
	mov.l	er0, @(4,sp)
	mov.l	@sp+, er0
	jmp	@_interrupt_entry

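/*
 * KGDB trap (wired into the TRAPA #2 slot of the redirect table).
 * The frame's LSP field is set to point at the saved return-address
 * area, and h8300_kgdb_trap() is then called with a pt_regs pointer.
 */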
#if defined(CONFIG_KGDB)
_kgdb_trap:
	subs	#4,sp
	SAVE_ALL
	mov.l	sp,er0
	add.l	#LRET,er0
	mov.l	er0,@(LSP,sp)
	jsr	@set_esp0
	mov.l	sp,er0
	subs	#4,er0
	jsr	@h8300_kgdb_trap
	jmp	@ret_from_exception
#endif

	.section .bss
_sw_ksp:
	.space	4
_sw_usp:
	.space	4

	.end