/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * Exception entry code. This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */
13.macro EXCEPTION_PROLOG trapno name handle_dar_dsisr=0
14 EXCEPTION_PROLOG_0 handle_dar_dsisr=\handle_dar_dsisr
15 EXCEPTION_PROLOG_1
16 EXCEPTION_PROLOG_2 \trapno \name handle_dar_dsisr=\handle_dar_dsisr
17.endm
18
19.macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
20 mtspr SPRN_SPRG_SCRATCH0,r10
21 mtspr SPRN_SPRG_SCRATCH1,r11
22 mfspr r10, SPRN_SPRG_THREAD
23 .if \handle_dar_dsisr
24#ifdef CONFIG_40x
25 mfspr r11, SPRN_DEAR
26#else
27 mfspr r11, SPRN_DAR
28#endif
29 stw r11, DAR(r10)
30#ifdef CONFIG_40x
31 mfspr r11, SPRN_ESR
32#else
33 mfspr r11, SPRN_DSISR
34#endif
35 stw r11, DSISR(r10)
36 .endif
37 mfspr r11, SPRN_SRR0
38 stw r11, SRR0(r10)
39 mfspr r11, SPRN_SRR1 /* check whether user or kernel */
40 stw r11, SRR1(r10)
41 mfcr r10
42 andi. r11, r11, MSR_PR
43.endm
44
45.macro EXCEPTION_PROLOG_1
46 mtspr SPRN_SPRG_SCRATCH2,r1
47 subi r1, r1, INT_FRAME_SIZE /* use r1 if kernel */
48 beq 1f
49 mfspr r1,SPRN_SPRG_THREAD
50 lwz r1,TASK_STACK-THREAD(r1)
51 addi r1, r1, THREAD_SIZE - INT_FRAME_SIZE
521:
53#ifdef CONFIG_VMAP_STACK
54 mtcrf 0x3f, r1
55 bt 32 - THREAD_ALIGN_SHIFT, vmap_stack_overflow
56#endif
57.endm
58
59.macro EXCEPTION_PROLOG_2 trapno name handle_dar_dsisr=0
60#ifdef CONFIG_PPC_8xx
61 .if \handle_dar_dsisr
62 li r11, RPN_PATTERN
63 mtspr SPRN_DAR, r11 /* Tag DAR, to be used in DTLB Error */
64 .endif
65#endif
66 LOAD_REG_IMMEDIATE(r11, MSR_KERNEL & ~MSR_RI) /* re-enable MMU */
67 mtspr SPRN_SRR1, r11
68 lis r11, 1f@h
69 ori r11, r11, 1f@l
70 mtspr SPRN_SRR0, r11
71 mfspr r11, SPRN_SPRG_SCRATCH2
72 rfi
73
74 .text
75\name\()_virt:
761:
77 stw r11,GPR1(r1)
78 stw r11,0(r1)
79 mr r11, r1
80 stw r10,_CCR(r11) /* save registers */
81 stw r12,GPR12(r11)
82 stw r9,GPR9(r11)
83 mfspr r10,SPRN_SPRG_SCRATCH0
84 mfspr r12,SPRN_SPRG_SCRATCH1
85 stw r10,GPR10(r11)
86 stw r12,GPR11(r11)
87 mflr r10
88 stw r10,_LINK(r11)
89 mfspr r12, SPRN_SPRG_THREAD
90 tovirt(r12, r12)
91 .if \handle_dar_dsisr
92 lwz r10, DAR(r12)
93 stw r10, _DAR(r11)
94 lwz r10, DSISR(r12)
95 stw r10, _DSISR(r11)
96 .endif
97 lwz r9, SRR1(r12)
98 lwz r12, SRR0(r12)
99#ifdef CONFIG_40x
100 rlwinm r9,r9,0,14,12 /* clear MSR_WE (necessary?) */
101#elif defined(CONFIG_PPC_8xx)
102 mtspr SPRN_EID, r2 /* Set MSR_RI */
103#else
104 li r10, MSR_KERNEL /* can take exceptions */
105 mtmsr r10 /* (except for mach check in rtas) */
106#endif
107 COMMON_EXCEPTION_PROLOG_END \trapno
108_ASM_NOKPROBE_SYMBOL(\name\()_virt)
109.endm
110
111.macro COMMON_EXCEPTION_PROLOG_END trapno
112 stw r0,GPR0(r1)
113 lis r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
114 addi r10,r10,STACK_FRAME_REGS_MARKER@l
115 stw r10,8(r1)
116 li r10, \trapno
117 stw r10,_TRAP(r1)
118 SAVE_4GPRS(3, r1)
119 SAVE_2GPRS(7, r1)
120 SAVE_NVGPRS(r1)
121 stw r2,GPR2(r1)
122 stw r12,_NIP(r1)
123 stw r9,_MSR(r1)
124 mfctr r10
125 mfspr r2,SPRN_SPRG_THREAD
126 stw r10,_CTR(r1)
127 tovirt(r2, r2)
128 mfspr r10,SPRN_XER
129 addi r2, r2, -THREAD
130 stw r10,_XER(r1)
131 addi r3,r1,STACK_FRAME_OVERHEAD
132.endm
133
134.macro prepare_transfer_to_handler
135#ifdef CONFIG_PPC_BOOK3S_32
136 andi. r12,r9,MSR_PR
137 bne 777f
138 bl prepare_transfer_to_handler
139777:
140#endif
141.endm
142
143.macro SYSCALL_ENTRY trapno
144 mfspr r9, SPRN_SRR1
145 mfspr r12, SPRN_SRR0
146 LOAD_REG_IMMEDIATE(r11, MSR_KERNEL) /* can take exceptions */
147 lis r10, 1f@h
148 ori r10, r10, 1f@l
149 mtspr SPRN_SRR1, r11
150 mtspr SPRN_SRR0, r10
151 mfspr r10,SPRN_SPRG_THREAD
152 mr r11, r1
153 lwz r1,TASK_STACK-THREAD(r10)
154 tovirt(r10, r10)
155 addi r1, r1, THREAD_SIZE - INT_FRAME_SIZE
156 rfi
1571:
158 stw r12,_NIP(r1)
159 mfcr r12
160 rlwinm r12,r12,0,4,2 /* Clear SO bit in CR */
161 stw r12,_CCR(r1)
162 b transfer_to_syscall /* jump to handler */
163.endm
164
/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
176#ifdef CONFIG_PPC_BOOK3S
177#define START_EXCEPTION(n, label) \
178 __HEAD; \
179 . = n; \
180 DO_KVM n; \
181label:
182
183#else
184#define START_EXCEPTION(n, label) \
185 __HEAD; \
186 . = n; \
187label:
188
189#endif
190
191#define EXCEPTION(n, label, hdlr) \
192 START_EXCEPTION(n, label) \
193 EXCEPTION_PROLOG n label; \
194 prepare_transfer_to_handler; \
195 bl hdlr; \
196 b interrupt_return
197
198.macro vmap_stack_overflow_exception
199 __HEAD
200vmap_stack_overflow:
201#ifdef CONFIG_SMP
202 mfspr r1, SPRN_SPRG_THREAD
203 lwz r1, TASK_CPU - THREAD(r1)
204 slwi r1, r1, 3
205 addis r1, r1, emergency_ctx@ha
206#else
207 lis r1, emergency_ctx@ha
208#endif
209 lwz r1, emergency_ctx@l(r1)
210 addi r1, r1, THREAD_SIZE - INT_FRAME_SIZE
211 EXCEPTION_PROLOG_2 0 vmap_stack_overflow
212 prepare_transfer_to_handler
213 bl stack_overflow_exception
214 b interrupt_return
215.endm

#endif /* __HEAD_32_H__ */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * Exception entry code. This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */

14.macro EXCEPTION_PROLOG
15 mtspr SPRN_SPRG_SCRATCH0,r10
16 mtspr SPRN_SPRG_SCRATCH1,r11
17 mfcr r10
18 EXCEPTION_PROLOG_1
19 EXCEPTION_PROLOG_2
20.endm
21
22.macro EXCEPTION_PROLOG_1
23 mfspr r11,SPRN_SRR1 /* check whether user or kernel */
24 andi. r11,r11,MSR_PR
25 tophys(r11,r1) /* use tophys(r1) if kernel */
26 beq 1f
27 mfspr r11,SPRN_SPRG_THREAD
28 lwz r11,TASK_STACK-THREAD(r11)
29 addi r11,r11,THREAD_SIZE
30 tophys(r11,r11)
311: subi r11,r11,INT_FRAME_SIZE /* alloc exc. frame */
32.endm
33
34.macro EXCEPTION_PROLOG_2
35 stw r10,_CCR(r11) /* save registers */
36 stw r12,GPR12(r11)
37 stw r9,GPR9(r11)
38 mfspr r10,SPRN_SPRG_SCRATCH0
39 stw r10,GPR10(r11)
40 mfspr r12,SPRN_SPRG_SCRATCH1
41 stw r12,GPR11(r11)
42 mflr r10
43 stw r10,_LINK(r11)
44 mfspr r12,SPRN_SRR0
45 mfspr r9,SPRN_SRR1
46 stw r1,GPR1(r11)
47 stw r1,0(r11)
48 tovirt(r1,r11) /* set new kernel sp */
49#ifdef CONFIG_40x
50 rlwinm r9,r9,0,14,12 /* clear MSR_WE (necessary?) */
51#else
52 li r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
53 MTMSRD(r10) /* (except for mach check in rtas) */
54#endif
55 stw r0,GPR0(r11)
56 lis r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
57 addi r10,r10,STACK_FRAME_REGS_MARKER@l
58 stw r10,8(r11)
59 SAVE_4GPRS(3, r11)
60 SAVE_2GPRS(7, r11)
61.endm
62
63.macro SYSCALL_ENTRY trapno
64 mfspr r12,SPRN_SPRG_THREAD
65 mfcr r10
66 lwz r11,TASK_STACK-THREAD(r12)
67 mflr r9
68 addi r11,r11,THREAD_SIZE - INT_FRAME_SIZE
69 rlwinm r10,r10,0,4,2 /* Clear SO bit in CR */
70 tophys(r11,r11)
71 stw r10,_CCR(r11) /* save registers */
72 mfspr r10,SPRN_SRR0
73 stw r9,_LINK(r11)
74 mfspr r9,SPRN_SRR1
75 stw r1,GPR1(r11)
76 stw r1,0(r11)
77 tovirt(r1,r11) /* set new kernel sp */
78 stw r10,_NIP(r11)
79#ifdef CONFIG_40x
80 rlwinm r9,r9,0,14,12 /* clear MSR_WE (necessary?) */
81#else
82 LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
83 MTMSRD(r10) /* (except for mach check in rtas) */
84#endif
85 lis r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
86 stw r2,GPR2(r11)
87 addi r10,r10,STACK_FRAME_REGS_MARKER@l
88 stw r9,_MSR(r11)
89 li r2, \trapno + 1
90 stw r10,8(r11)
91 stw r2,_TRAP(r11)
92 SAVE_GPR(0, r11)
93 SAVE_4GPRS(3, r11)
94 SAVE_2GPRS(7, r11)
95 addi r11,r1,STACK_FRAME_OVERHEAD
96 addi r2,r12,-THREAD
97 stw r11,PT_REGS(r12)
98#if defined(CONFIG_40x)
99 /* Check to see if the dbcr0 register is set up to debug. Use the
100 internal debug mode bit to do this. */
101 lwz r12,THREAD_DBCR0(r12)
102 andis. r12,r12,DBCR0_IDM@h
103#endif
104 ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
105#if defined(CONFIG_40x)
106 beq+ 3f
107 /* From user and task is ptraced - load up global dbcr0 */
108 li r12,-1 /* clear all pending debug events */
109 mtspr SPRN_DBSR,r12
110 lis r11,global_dbcr0@ha
111 tophys(r11,r11)
112 addi r11,r11,global_dbcr0@l
113 lwz r12,0(r11)
114 mtspr SPRN_DBCR0,r12
115 lwz r12,4(r11)
116 addi r12,r12,-1
117 stw r12,4(r11)
118#endif
119
1203:
121 tovirt(r2, r2) /* set r2 to current */
122 lis r11, transfer_to_syscall@h
123 ori r11, r11, transfer_to_syscall@l
124#ifdef CONFIG_TRACE_IRQFLAGS
125 /*
126 * If MSR is changing we need to keep interrupts disabled at this point
127 * otherwise we might risk taking an interrupt before we tell lockdep
128 * they are enabled.
129 */
130 LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
131 rlwimi r10, r9, 0, MSR_EE
132#else
133 LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
134#endif
135#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
136 mtspr SPRN_NRI, r0
137#endif
138 mtspr SPRN_SRR1,r10
139 mtspr SPRN_SRR0,r11
140 SYNC
141 RFI /* jump to handler, enable MMU */
142.endm

/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
155#ifdef CONFIG_PPC_BOOK3S
156#define START_EXCEPTION(n, label) \
157 . = n; \
158 DO_KVM n; \
159label:
160
161#else
162#define START_EXCEPTION(n, label) \
163 . = n; \
164label:
165
166#endif
167
168#define EXCEPTION(n, label, hdlr, xfer) \
169 START_EXCEPTION(n, label) \
170 EXCEPTION_PROLOG; \
171 addi r3,r1,STACK_FRAME_OVERHEAD; \
172 xfer(n, hdlr)
173
174#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret) \
175 li r10,trap; \
176 stw r10,_TRAP(r11); \
177 LOAD_REG_IMMEDIATE(r10, msr); \
178 bl tfer; \
179 .long hdlr; \
180 .long ret
181
182#define EXC_XFER_STD(n, hdlr) \
183 EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full, \
184 ret_from_except_full)
185
186#define EXC_XFER_LITE(n, hdlr) \
187 EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
188 ret_from_except)

#endif /* __HEAD_32_H__ */