/* SPDX-License-Identifier: GPL-2.0 */
/*
 *
 * linux/arch/h8300/kernel/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 * David McCullough <davidm@snapgear.com>
 *
 */

/*
 * entry.S
 * include exception/interrupt gateway
 * system call entry
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
	.h8300h
	.macro	SHLL2 reg
	shll.l	\reg
	shll.l	\reg
	.endm
	.macro	SHLR2 reg
	shlr.l	\reg
	shlr.l	\reg
	.endm
	.macro	SAVEREGS
	mov.l	er0,@-sp
	mov.l	er1,@-sp
	mov.l	er2,@-sp
	mov.l	er3,@-sp
	.endm
	.macro	RESTOREREGS
	mov.l	@sp+,er3
	mov.l	@sp+,er2
	.endm
	.macro	SAVEEXR
	.endm
	.macro	RESTOREEXR
	.endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
	.h8300s
	.macro	SHLL2 reg
	shll.l	#2,\reg
	.endm
	.macro	SHLR2 reg
	shlr.l	#2,\reg
	.endm
	.macro	SAVEREGS
	stm.l	er0-er3,@-sp
	.endm
	.macro	RESTOREREGS
	ldm.l	@sp+,er2-er3
	.endm
	.macro	SAVEEXR
	mov.w	@(USEREXR:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)		/* copy EXR */
	.endm
	.macro	RESTOREEXR
	mov.w	@(LEXR-LER1:16,sp),r1		/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(USEREXR:16,er0)
	.endm
#endif
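
/*
 * The macros above hide the differences between the two supported CPU
 * cores: the H8/300H has no multi-bit shift, so SHLL2/SHLR2 expand to
 * two single-bit shifts, it lacks the stm.l/ldm.l multiple-register
 * moves, and it has no EXR register, so SAVEEXR/RESTOREEXR are empty
 * there.  The exception frame layouts also differ, hence the separate
 * USERRET/USEREXR offsets.
 */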


/* CPU context save/restore macros. */

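/*
 * SAVE_ALL builds a struct pt_regs frame on the kernel stack.  The L*
 * constants (LVEC, LORIG, LSP, LCCR, LER0-LER6, LRET, plus LEXR on H8S)
 * are byte offsets into that frame, generated by asm-offsets.c.  On
 * entry from user mode the user stack pointer is parked in _sw_usp and
 * the CPU-pushed CCR/PC words are copied across from the user stack;
 * on entry from kernel mode the frame is built in place.  Either way
 * the saved CCR is left in r1 for the caller.
 */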
	.macro	SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l				/* check kernel mode */
	btst	#4,r0l
	bne	5f

	/* user mode */
	mov.l	sp,@_sw_usp
	mov.l	@sp,er0				/* restore saved er0 */
	orc	#0x10,ccr			/* switch kernel stack */
	mov.l	@_sw_ksp,sp
	sub.l	#(LRET-LORIG),sp		/* allocate LORIG - LRET */
	SAVEREGS
	mov.l	@_sw_usp,er0
	mov.l	@(USERRET:16,er0),er1		/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR

	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)		/* copy ER0 */
	mov.w	e1,r1				/* e1 highbyte = ccr */
	and	#0xef,r1h			/* mask kernel-mode flag (CCR bit 4) */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0				/* restore saved er0 */
	subs	#2,sp				/* set dummy ccr */
	subs	#4,sp				/* set dummy sp */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1		/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)		/* set ccr */
	mov.l	@_sw_usp,er2
	mov.l	er2,@(LSP-LER3:16,sp)		/* set usp */
	mov.l	er6,@-sp			/* syscall arg #6 */
	mov.l	er5,@-sp			/* syscall arg #5 */
	mov.l	er4,@-sp			/* syscall arg #4 */
	.endm					/* r1 = ccr */

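/*
 * RESTORE_ALL unwinds the pt_regs frame in the reverse order.  When
 * returning to user space it rewrites the CCR/PC exception frame on the
 * user stack (via _sw_usp), records the kernel stack pointer in _sw_ksp
 * for the next entry, and switches stacks back before the rte.
 */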
	.macro	RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0		/* check kernel mode */
	btst	#4,r0l
	bne	7f

	orc	#0xc0,ccr
	mov.l	@(LSP-LER1:16,sp),er0
	mov.l	@(LER0-LER1:16,sp),er1		/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1		/* restore the RET addr */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)

	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp			/* remove LORIG - LRET */
	mov.l	sp,@_sw_ksp
	andc	#0xef,ccr			/* switch to user mode */
	mov.l	er0,sp
	bra	8f
7:
	mov.l	@sp+,er1
	add.l	#10,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp				/* remove the software-created LVEC */
	rte
	.endm

.globl _system_call
.globl ret_from_exception
.globl ret_from_fork
.globl ret_from_kernel_thread
.globl ret_from_interrupt
.globl _interrupt_redirect_table
.globl _sw_ksp,_sw_usp
.globl _resume
.globl _interrupt_entry
.globl _trace_break
.globl _nmi

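/*
 * Interrupt redirect table.  With a ROM kernel the table itself lives
 * in the .int_redirect section: every hardware vector points at one
 * 4-byte jsr/jmp stub here, so _interrupt_entry can recover the vector
 * number from the return address the jsr pushed.  With a RAM kernel
 * only the table's base address is kept here, in .bss.
 */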
#if defined(CONFIG_ROMKERNEL)
	.section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@_trace_break
	.long	0
#endif

	jsr	@_interrupt_entry		/* NMI */
	jmp	@_system_call			/* TRAPA #0 (System call) */
	.long	0
#if defined(CONFIG_KGDB)
	jmp	@_kgdb_trap
#else
	.long	0
#endif
	jmp	@_trace_break			/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@_interrupt_entry
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
.globl _interrupt_redirect_table
	.section .bss
_interrupt_redirect_table:
	.space	4
#endif

	.section .text
	.align	2
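/*
 * Common interrupt entry.  The LVEC slot holds the return address that
 * the "jsr @_interrupt_entry" stub in the redirect table pushed, i.e.
 * the address of the stub after it; subtracting the table base,
 * dividing by the 4-byte stub size (SHLR2) and subtracting one yields
 * the vector number handed to do_IRQ.
 */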
_interrupt_entry:
	SAVE_ALL
/* r1l is saved ccr */
	mov.l	sp,er0
	add.l	#LVEC,er0
	btst	#4,r1l
	bne	1f
	/* user LVEC */
	mov.l	@_sw_usp,er0
	adds	#4,er0
1:
	mov.l	@er0,er0			/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@_interrupt_redirect_table,er1
	sub.l	er1,er0
#endif
	SHLR2	er0
	dec.l	#1,er0
	mov.l	sp,er1
	subs	#4,er1				/* adjust ret_pc */
#if defined(CONFIG_CPU_H8S)
	orc	#7,exr
#endif
	jsr	@do_IRQ
	jmp	@ret_from_interrupt

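/*
 * System call entry (TRAPA #0).  The syscall number arrives in er0 and
 * the arguments in er1-er6.  SAVE_ALL has already pushed the saved
 * er4-er6 at the bottom of the frame, which is where the C calling
 * convention expects a function's fourth and later arguments, so only
 * er1-er3 need to be reloaded before the handler is called.
 */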
_system_call:
	subs	#4,sp				/* dummy LVEC */
	SAVE_ALL
	/* er0: syscall nr */
	andc	#0xbf,ccr
	mov.l	er0,er4

	/* save top of frame */
	mov.l	sp,er0
	jsr	@set_esp0
	andc	#0x3f,ccr
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	1f
	mov.l	sp,er0
	jsr	@do_syscall_trace_enter
1:
	cmp.l	#__NR_syscalls,er4
	bcc	badsys
	SHLL2	er4
	mov.l	#_sys_call_table,er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	ret_from_exception:16
	mov.l	@(LER1:16,sp),er0
	mov.l	@(LER2:16,sp),er1
	mov.l	@(LER3:16,sp),er2
	jsr	@er4
	mov.l	er0,@(LER0:16,sp)		/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	2f
	mov.l	sp,er0
	jsr	@do_syscall_trace_leave
2:
	orc	#0xc0,ccr
	bra	resume_userspace

badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)
	bra	resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif

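/*
 * Common return paths.  The CCR saved in the frame tells us whether we
 * are returning to kernel or user mode; for the latter, the TIF work
 * flags (rescheduling, signal delivery) are serviced before
 * RESTORE_ALL.
 */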
ret_from_exception:
#if defined(CONFIG_PREEMPT)
	orc	#0xc0,ccr
#endif
ret_from_interrupt:
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:16		/* return from kernel */
resume_userspace:
	andc	#0xbf,ccr
	mov.l	sp,er4
	and.w	#0xe000,r4			/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0				/* er0: pt_regs */
	jsr	@do_notify_resume
	bra	resume_userspace:8
work_resched:
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@schedule
	bra	resume_userspace:8
restore_all:
	RESTORE_ALL				/* Does RTE */

#if defined(CONFIG_PREEMPT)
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l		/* interrupts enabled? */
	bmi	restore_all:8
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@preempt_schedule_irq
	bra	need_resched:8
#endif

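/*
 * Both fork return paths receive the previous task in er2 (set up by
 * the switch code) for schedule_tail.  A new kernel thread additionally
 * finds its function in the saved er5 slot and its argument in the
 * saved er4 slot, as arranged by copy_thread.
 */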
ret_from_fork:
	mov.l	er2,er0
	jsr	@schedule_tail
	jmp	@ret_from_exception

ret_from_kernel_thread:
	mov.l	er2,er0
	jsr	@schedule_tail
	mov.l	@(LER4:16,sp),er0
	mov.l	@(LER5:16,sp),er1
	jsr	@er1
	jmp	@ret_from_exception

_resume:
	/*
	 * Beware - when entering resume, prev (the current task) is
	 * in er0 and next (the new task) is in er1, so don't change
	 * these registers until their contents are no longer needed.
	 */

	/* save ccr */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)

	/* disable interrupts */
	orc	#0xc0,ccr
	mov.l	@_sw_usp,er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)

	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */

	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@_sw_usp
	mov.l	@(THREAD_KSP:16,er1),sp

	/* restore ccr */
	mov.w	@(THREAD_CCR+2:16,er1),r3

	ldc	r3l,ccr
	rts

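/*
 * Breakpoint/trace trap (TRAPA #3, opcode 0x5730).  LORIG is set to -1
 * to mark the frame as not-a-syscall, and the saved user PC is backed
 * up by one 2-byte slot unless the word before it really is the TRAPA
 * #3 opcode, so that trace_trap sees the address of the trap site.
 */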
_trace_break:
	subs	#4,sp
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)
	mov.l	sp,er0
	jsr	@set_esp0
	mov.l	@_sw_usp,er0
	mov.l	@er0,er1
	mov.w	@(-2:16,er1),r2
	cmp.w	#0x5730,r2
	beq	1f
	subs	#2,er1
	mov.l	er1,@er0
1:
	and.w	#0xff,e1
	mov.l	er1,er0
	jsr	@trace_trap
	jmp	@ret_from_exception

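/*
 * NMI entry: synthesize the LVEC value that a redirect-table stub at
 * entry 8 (the stored table base plus 8 * 4) would have pushed, then
 * enter the common interrupt path, which decodes it back to vector 7,
 * the NMI vector.
 */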
_nmi:
	subs	#4,sp
	mov.l	er0,@-sp
	mov.l	@_interrupt_redirect_table,er0
	add.l	#8*4,er0
	mov.l	er0,@(4,sp)
	mov.l	@sp+,er0
	jmp	@_interrupt_entry

#if defined(CONFIG_KGDB)
_kgdb_trap:
	subs	#4,sp
	SAVE_ALL
	mov.l	sp,er0
	add.l	#LRET,er0
	mov.l	er0,@(LSP,sp)
	jsr	@set_esp0
	mov.l	sp,er0
	subs	#4,er0
	jsr	@h8300_kgdb_trap
	jmp	@ret_from_exception
#endif

	.section .bss
_sw_ksp:
	.space	4
_sw_usp:
	.space	4

	.end