/* (removed stray "Loading..." page-capture artifact — not part of the source) */
1/* SPDX-License-Identifier: GPL-2.0 */
2#ifndef __HEAD_32_H__
3#define __HEAD_32_H__
4
5#include <asm/ptrace.h> /* for STACK_FRAME_REGS_MARKER */
6
7/*
8 * Exception entry code. This code runs with address translation
9 * turned off, i.e. using physical addresses.
10 * We assume sprg3 has the physical address of the current
11 * task's thread_struct.
12 */
/*
 * Full exception prologue: save scratch registers and SRRs (step 0),
 * pick a kernel stack (step 1), then build the exception frame (step 2).
 * \handle_dar_dsisr additionally captures DAR/DSISR for data-access
 * style exceptions.
 */
.macro EXCEPTION_PROLOG handle_dar_dsisr=0
	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2	handle_dar_dsisr=\handle_dar_dsisr
.endm
18
/*
 * Prologue step 0: stash r10/r11 in the SPRG scratch registers, and
 * (VMAP_STACK only) copy SRR0/SRR1 — plus DAR/DSISR when requested —
 * into the thread_struct before a nested DTLB miss can clobber them.
 * On exit: r10 = saved CR, cr0.eq set iff the exception came from
 * kernel mode (MSR_PR clear in SRR1).
 */
.macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
	mtspr	SPRN_SPRG_SCRATCH0,r10
	mtspr	SPRN_SPRG_SCRATCH1,r11
#ifdef CONFIG_VMAP_STACK
	mfspr	r10, SPRN_SPRG_THREAD
	.if	\handle_dar_dsisr
	mfspr	r11, SPRN_DAR
	stw	r11, DAR(r10)
	mfspr	r11, SPRN_DSISR
	stw	r11, DSISR(r10)
	.endif
	mfspr	r11, SPRN_SRR0
	stw	r11, SRR0(r10)
#endif
	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
#ifdef CONFIG_VMAP_STACK
	stw	r11, SRR1(r10)
#endif
	mfcr	r10
	andi.	r11, r11, MSR_PR	/* cr0.eq = came from kernel */
.endm
40
/*
 * Prologue step 1: compute the exception-frame pointer into r11.
 * Reuses r1 when coming from the kernel (cr0.eq from step 0), else
 * takes the task stack from thread_struct.  With VMAP_STACK the MMU
 * is re-enabled for data (MSR_DR kept, MSR_IR/MSR_RI cleared) so the
 * vmalloc'ed stack is addressable; the mtcrf/bt pair then branches to
 * stack_overflow when r11 is below the THREAD_ALIGN boundary.
 * \for_rtas=1 skips the mtmsr (RTAS machine-check path keeps its MSR).
 */
.macro EXCEPTION_PROLOG_1 for_rtas=0
#ifdef CONFIG_VMAP_STACK
	.ifeq	\for_rtas		/* i.e. if \for_rtas == 0 */
	li	r11, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r11
	isync
	.endif
	subi	r11, r1, INT_FRAME_SIZE		/* use r1 if kernel */
#else
	tophys(r11,r1)			/* use tophys(r1) if kernel */
	subi	r11, r11, INT_FRAME_SIZE	/* alloc exc. frame */
#endif
	beq	1f			/* from kernel: frame ready */
	mfspr	r11,SPRN_SPRG_THREAD
	tovirt_vmstack r11, r11
	lwz	r11,TASK_STACK-THREAD(r11)
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
	tophys_novmstack r11, r11
1:
#ifdef CONFIG_VMAP_STACK
	mtcrf	0x7f, r11		/* low bits of r11 into CR1..CR7 */
	bt	32 - THREAD_ALIGN_SHIFT, stack_overflow
#endif
.endm
65
/*
 * Prologue step 2: populate the exception frame at r11 — CCR, GPRs,
 * LR, optionally DAR/DSISR — recover SRR0/SRR1 into r12/r9, switch
 * r1 to the new kernel stack and lay down the frame marker.
 * On exit: r11 = frame, r12 = SRR0 (NIP), r9 = SRR1 (MSR).
 * The HPTE feature sections handle hash-MMU (603 vs hash) CR
 * save ordering differences under VMAP_STACK.
 */
.macro EXCEPTION_PROLOG_2 handle_dar_dsisr=0
#if defined(CONFIG_VMAP_STACK) && defined(CONFIG_PPC_BOOK3S)
BEGIN_MMU_FTR_SECTION
	mtcr	r10			/* hash MMU: restore CR, saved below */
FTR_SECTION_ELSE
	stw	r10, _CCR(r11)
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_HPTE_TABLE)
#else
	stw	r10,_CCR(r11)		/* save registers */
#endif
	mfspr	r10, SPRN_SPRG_SCRATCH0
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	stw	r10,GPR10(r11)		/* original r10, from scratch0 */
#if defined(CONFIG_VMAP_STACK) && defined(CONFIG_PPC_BOOK3S)
BEGIN_MMU_FTR_SECTION
	mfcr	r10
	stw	r10, _CCR(r11)
END_MMU_FTR_SECTION_IFSET(MMU_FTR_HPTE_TABLE)
#endif
	mfspr	r12,SPRN_SPRG_SCRATCH1
	stw	r12,GPR11(r11)		/* original r11, from scratch1 */
	mflr	r10
	stw	r10,_LINK(r11)
#ifdef CONFIG_VMAP_STACK
	/* SRRs (and DAR/DSISR) were stashed in thread_struct by step 0 */
	mfspr	r12, SPRN_SPRG_THREAD
	tovirt(r12, r12)
	.if	\handle_dar_dsisr
	lwz	r10, DAR(r12)
	stw	r10, _DAR(r11)
	lwz	r10, DSISR(r12)
	stw	r10, _DSISR(r11)
	.endif
	lwz	r9, SRR1(r12)
#if defined(CONFIG_VMAP_STACK) && defined(CONFIG_PPC_BOOK3S)
BEGIN_MMU_FTR_SECTION
	andi.	r10, r9, MSR_PR		/* re-set cr0.eq (user/kernel) */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_HPTE_TABLE)
#endif
	lwz	r12, SRR0(r12)
#else
	mfspr	r12,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
#endif
	stw	r1,GPR1(r11)		/* save old stack pointer */
	stw	r1,0(r11)		/* back-chain to previous frame */
	tovirt_novmstack r1, r11 /* set new kernel sp */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	li	r10, MSR_KERNEL & ~MSR_IR /* can take exceptions */
#else
	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	stw	r0,GPR0(r11)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,8(r11)
	SAVE_4GPRS(3, r11)		/* r3-r6 */
	SAVE_2GPRS(7, r11)		/* r7-r8 */
.endm
130
/*
 * System-call entry: builds an exception frame on the task's kernel
 * stack, saves volatile state, records \trapno + 1 in _TRAP, then
 * RFIs to transfer_to_syscall with the MMU enabled.
 * Syscalls from kernel mode (cr0.eq after the MSR_PR test) bail out
 * to ret_from_kernel_syscall via label 99.
 * Note: SRR0 is parked in CTR under VMAP_STACK because taking a DTLB
 * miss on the vmalloc'ed stack would clobber SRR0/SRR1.
 */
.macro SYSCALL_ENTRY trapno
	mfspr	r12,SPRN_SPRG_THREAD
	mfspr	r9, SPRN_SRR1
#ifdef CONFIG_VMAP_STACK
	mfspr	r11, SPRN_SRR0
	mtctr	r11			/* park NIP; SRR0 not DTLB-miss safe */
#endif
	andi.	r11, r9, MSR_PR		/* cr0.eq = came from kernel */
	lwz	r11,TASK_STACK-THREAD(r12)
	beq-	99f			/* kernel-mode syscall: special path */
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
#ifdef CONFIG_VMAP_STACK
	li	r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r10
	isync
#endif
	tovirt_vmstack r12, r12
	tophys_novmstack r11, r11
	mflr	r10
	stw	r10, _LINK(r11)
#ifdef CONFIG_VMAP_STACK
	mfctr	r10			/* recover parked SRR0 */
#else
	mfspr	r10,SPRN_SRR0
#endif
	stw	r1,GPR1(r11)
	stw	r1,0(r11)		/* back-chain */
	tovirt_novmstack r1, r11 /* set new kernel sp */
	stw	r10,_NIP(r11)
	mfcr	r10
	rlwinm	r10,r10,0,4,2	/* Clear SO bit in CR */
	stw	r10,_CCR(r11)		/* save registers */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r9,_MSR(r11)
	li	r2, \trapno + 1		/* +1: syscall-flavoured trap number */
	stw	r10,8(r11)
	stw	r2,_TRAP(r11)
	SAVE_GPR(0, r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
	addi	r11,r1,STACK_FRAME_OVERHEAD
	addi	r2,r12,-THREAD		/* r2 = phys/virt task_struct */
	stw	r11,PT_REGS(r12)	/* thread.regs = pt_regs */
#if defined(CONFIG_40x)
	/* Check to see if the dbcr0 register is set up to debug.  Use the
	   internal debug mode bit to do this. */
	lwz	r12,THREAD_DBCR0(r12)
	andis.	r12,r12,DBCR0_IDM@h
#endif
	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
#if defined(CONFIG_40x)
	beq+	3f			/* no internal debug mode set */
	/* From user and task is ptraced - load up global dbcr0 */
	li	r12,-1			/* clear all pending debug events */
	mtspr	SPRN_DBSR,r12
	lis	r11,global_dbcr0@ha
	tophys(r11,r11)
	addi	r11,r11,global_dbcr0@l
	lwz	r12,0(r11)
	mtspr	SPRN_DBCR0,r12
	lwz	r12,4(r11)
	addi	r12,r12,-1
	stw	r12,4(r11)
#endif

3:
	tovirt_novmstack r2, r2 	/* set r2 to current */
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this point
	 * otherwise we might risk taking an interrupt before we tell lockdep
	 * they are enabled.
	 */
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
	rlwimi	r10, r9, 0, MSR_EE	/* keep caller's EE state */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	SYNC
	RFI				/* jump to handler, enable MMU */
99:	b	ret_from_kernel_syscall
.endm
232
/*
 * Read DAR/DSISR from the SPRs and store them into the exception
 * frame at \sp, clobbering \reg1/\reg2.  No-op under VMAP_STACK,
 * where EXCEPTION_PROLOG already saved them to the frame.
 */
.macro save_dar_dsisr_on_stack reg1, reg2, sp
#ifndef CONFIG_VMAP_STACK
	mfspr	\reg1, SPRN_DAR
	mfspr	\reg2, SPRN_DSISR
	stw	\reg1, _DAR(\sp)
	stw	\reg2, _DSISR(\sp)
#endif
.endm
241
/*
 * Leave DAR/DSISR in \reg1/\reg2 AND make sure they are on the stack:
 * under VMAP_STACK they are already in the frame, so just reload them;
 * otherwise fetch from the SPRs and store (save_dar_dsisr_on_stack).
 */
.macro get_and_save_dar_dsisr_on_stack reg1, reg2, sp
#ifdef CONFIG_VMAP_STACK
	lwz	\reg1, _DAR(\sp)
	lwz	\reg2, _DSISR(\sp)
#else
	save_dar_dsisr_on_stack \reg1, \reg2, \sp
#endif
.endm
250
/*
 * Physical->virtual conversion that is only needed when stacks are
 * vmalloc'ed (VMAP_STACK); otherwise a plain move (elided if dst==src).
 */
.macro tovirt_vmstack dst, src
#ifdef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm
260
/*
 * Physical->virtual conversion for the non-VMAP_STACK case only;
 * under VMAP_STACK the address is already virtual, so just a move
 * (elided if dst==src).
 */
.macro tovirt_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm
270
/*
 * Virtual->physical conversion for the non-VMAP_STACK case only;
 * under VMAP_STACK the stack is accessed virtually, so just a move
 * (elided if dst==src).
 */
.macro tophys_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tophys(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm
280
281/*
282 * Note: code which follows this uses cr0.eq (set if from kernel),
283 * r11, r12 (SRR0), and r9 (SRR1).
284 *
285 * Note2: once we have set r1 we are in a position to take exceptions
286 * again, and we could thus set MSR:RI at that point.
287 */
288
289/*
290 * Exception vectors.
291 */
/*
 * Place a vector at fixed offset n and open label:.  On Book3S the
 * DO_KVM hook is emitted first so KVM can intercept the vector.
 */
#ifdef CONFIG_PPC_BOOK3S
#define	START_EXCEPTION(n, label)		\
	. = n;					\
	DO_KVM n;				\
label:

#else
#define	START_EXCEPTION(n, label)		\
	. = n;					\
label:

#endif
304
/*
 * Boilerplate vector: prologue, r3 = pt_regs, then dispatch through
 * the given xfer macro (EXC_XFER_STD / EXC_XFER_LITE) to hdlr.
 */
#define EXCEPTION(n, label, hdlr, xfer)		\
	START_EXCEPTION(n, label)		\
	EXCEPTION_PROLOG;			\
	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
	xfer(n, hdlr)
310
/*
 * Record the trap number in the frame, load the handler MSR, and branch
 * to the transfer routine.  The handler and return addresses are emitted
 * as data words right after the bl — presumably the transfer routine
 * picks them up via LR (see entry_32.S; NOTE(review): confirm there).
 */
#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)	\
	li	r10,trap;					\
	stw	r10,_TRAP(r11);				\
	LOAD_REG_IMMEDIATE(r10, msr);			\
	bl	tfer;					\
	.long	hdlr;					\
	.long	ret
318
/*
 * Standard transfer: full register save, return via ret_from_except_full.
 * LITE variant: lighter save path; the trap number is tagged n+1
 * (low bit set) to distinguish it — NOTE(review): confirm semantics
 * of the low _TRAP bit against entry_32.S.
 */
#define EXC_XFER_STD(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
			  ret_from_except_full)

#define EXC_XFER_LITE(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
			  ret_from_except)
326
/*
 * Landing pad for the stack-overflow branch taken in EXCEPTION_PROLOG_1.
 * Switches r11 to a per-CPU emergency stack (emergency_ctx, indexed by
 * TASK_CPU on SMP; 8-byte stride per the slwi 3), falling back to
 * init_thread_union when the emergency context is not yet set up, then
 * finishes the frame and calls stack_overflow_exception.
 */
.macro vmap_stack_overflow_exception
#ifdef CONFIG_VMAP_STACK
#ifdef CONFIG_SMP
	mfspr	r11, SPRN_SPRG_THREAD
	tovirt(r11, r11)
	lwz	r11, TASK_CPU - THREAD(r11)
	slwi	r11, r11, 3
	addis	r11, r11, emergency_ctx@ha
#else
	lis	r11, emergency_ctx@ha
#endif
	lwz	r11, emergency_ctx@l(r11)
	cmpwi	cr1, r11, 0
	bne	cr1, 1f
	lis	r11, init_thread_union@ha	/* emergency_ctx unset: use init stack */
	addi	r11, r11, init_thread_union@l
1:	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
	EXCEPTION_PROLOG_2
	SAVE_NVGPRS(r11)
	addi	r3, r1, STACK_FRAME_OVERHEAD
	EXC_XFER_STD(0, stack_overflow_exception)
#endif
.endm
350
351#endif /* __HEAD_32_H__ */
1/* SPDX-License-Identifier: GPL-2.0 */
2#ifndef __HEAD_32_H__
3#define __HEAD_32_H__
4
5#include <asm/ptrace.h> /* for STACK_FRAME_REGS_MARKER */
6
7/*
8 * Exception entry code. This code runs with address translation
9 * turned off, i.e. using physical addresses.
10 * We assume sprg3 has the physical address of the current
11 * task's thread_struct.
12 */
/*
 * Full exception prologue (newer revision): step 0 stashes registers
 * and SRRs, step 1 switches r1 to a kernel stack, step 2 re-enables
 * the MMU and builds the frame at \name\()_virt for trap \trapno.
 */
.macro EXCEPTION_PROLOG trapno name handle_dar_dsisr=0
	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2	\trapno \name handle_dar_dsisr=\handle_dar_dsisr
.endm
18
/*
 * Prologue step 0 (newer revision): save r10/r11 to SPRG scratch and
 * unconditionally stash SRR0/SRR1 — plus the fault address/status
 * (DEAR/ESR on 40x, DAR/DSISR elsewhere) when requested — into the
 * thread_struct, before a nested exception can clobber them.
 * On exit: r10 = saved CR, cr0.eq set iff from kernel (MSR_PR clear).
 */
.macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
	mtspr	SPRN_SPRG_SCRATCH0,r10
	mtspr	SPRN_SPRG_SCRATCH1,r11
	mfspr	r10, SPRN_SPRG_THREAD
	.if	\handle_dar_dsisr
#ifdef CONFIG_40x
	mfspr	r11, SPRN_DEAR		/* 40x name for the fault address */
#else
	mfspr	r11, SPRN_DAR
#endif
	stw	r11, DAR(r10)
#ifdef CONFIG_40x
	mfspr	r11, SPRN_ESR		/* 40x name for the fault status */
#else
	mfspr	r11, SPRN_DSISR
#endif
	stw	r11, DSISR(r10)
	.endif
	mfspr	r11, SPRN_SRR0
	stw	r11, SRR0(r10)
	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
	stw	r11, SRR1(r10)
	mfcr	r10
	andi.	r11, r11, MSR_PR
.endm
44
/*
 * Prologue step 1 (newer revision): park the old r1 in SPRG_SCRATCH2
 * and point r1 at the exception frame — on the current stack when from
 * kernel (cr0.eq), else at the top of the task's kernel stack.  With
 * VMAP_STACK, branch to vmap_stack_overflow when r1 has dropped below
 * the THREAD_ALIGN boundary (checked via the mtcrf/bt pair).
 */
.macro EXCEPTION_PROLOG_1
	mtspr	SPRN_SPRG_SCRATCH2,r1
	subi	r1, r1, INT_FRAME_SIZE		/* use r1 if kernel */
	beq	1f
	mfspr	r1,SPRN_SPRG_THREAD
	lwz	r1,TASK_STACK-THREAD(r1)
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
1:
#ifdef CONFIG_VMAP_STACK
	mtcrf	0x3f, r1		/* low bits of r1 into CR2..CR7 */
	bt	32 - THREAD_ALIGN_SHIFT, vmap_stack_overflow
#endif
.endm
58
/*
 * Prologue step 2 (newer revision): rfi into translated mode at local
 * label 1 (emitted in .text as \name\()_virt), then build the frame
 * directly on the virtual stack: restore the old r1 from scratch2 into
 * GPR1/back-chain, save CCR/GPRs/LR, pull SRR0/SRR1 (and DAR/DSISR if
 * requested) back out of thread_struct into r12/r9, re-enable RI, and
 * finish via COMMON_EXCEPTION_PROLOG_END with trap number \trapno.
 */
.macro EXCEPTION_PROLOG_2 trapno name handle_dar_dsisr=0
#ifdef CONFIG_PPC_8xx
	.if	\handle_dar_dsisr
	li	r11, RPN_PATTERN
	mtspr	SPRN_DAR, r11	/* Tag DAR, to be used in DTLB Error */
	.endif
#endif
	LOAD_REG_IMMEDIATE(r11, MSR_KERNEL & ~MSR_RI) /* re-enable MMU */
	mtspr	SPRN_SRR1, r11
	lis	r11, 1f@h
	ori	r11, r11, 1f@l
	mtspr	SPRN_SRR0, r11
	mfspr	r11, SPRN_SPRG_SCRATCH2	/* r11 = pre-exception r1 */
	rfi

	.text
\name\()_virt:
1:
	stw	r11,GPR1(r1)		/* save old stack pointer */
	stw	r11,0(r1)		/* back-chain */
	mr	r11, r1
	stw	r10,_CCR(r11)		/* save registers */
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	mfspr	r10,SPRN_SPRG_SCRATCH0
	mfspr	r12,SPRN_SPRG_SCRATCH1
	stw	r10,GPR10(r11)
	stw	r12,GPR11(r11)
	mflr	r10
	stw	r10,_LINK(r11)
	mfspr	r12, SPRN_SPRG_THREAD
	tovirt(r12, r12)
	.if	\handle_dar_dsisr	/* stashed there by step 0 */
	lwz	r10, DAR(r12)
	stw	r10, _DAR(r11)
	lwz	r10, DSISR(r12)
	stw	r10, _DSISR(r11)
	.endif
	lwz	r9, SRR1(r12)
	lwz	r12, SRR0(r12)
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#elif defined(CONFIG_PPC_8xx)
	mtspr	SPRN_EID, r2		/* Set MSR_RI */
#else
	li	r10, MSR_KERNEL		/* can take exceptions */
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	COMMON_EXCEPTION_PROLOG_END \trapno
_ASM_NOKPROBE_SYMBOL(\name\()_virt)
.endm
110
/*
 * Tail of the exception frame build: save r0/r2-r8, non-volatile GPRs,
 * NIP (r12), MSR (r9), CTR and XER; record the trap number and frame
 * marker; set r2 = current (from SPRG_THREAD) and r3 = pt_regs pointer
 * for the C handler.
 */
.macro COMMON_EXCEPTION_PROLOG_END trapno
	stw	r0,GPR0(r1)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,STACK_INT_FRAME_MARKER(r1)
	li	r10, \trapno
	stw	r10,_TRAP(r1)
	SAVE_GPRS(3, 8, r1)
	SAVE_NVGPRS(r1)
	stw	r2,GPR2(r1)
	stw	r12,_NIP(r1)
	stw	r9,_MSR(r1)
	mfctr	r10
	mfspr	r2,SPRN_SPRG_THREAD
	stw	r10,_CTR(r1)
	tovirt(r2, r2)			/* r2 = current thread (virtual) */
	mfspr	r10,SPRN_XER
	addi	r2, r2, -THREAD		/* r2 = current task_struct */
	stw	r10,_XER(r1)
	addi	r3,r1,STACK_INT_FRAME_REGS	/* r3 = pt_regs for handler */
.endm
132
/*
 * Book3S/32 pre-handler hook: for kernel-mode entries (MSR_PR clear in
 * r9) call the C prepare_transfer_to_handler; for user-mode entries
 * take the KUEP lock instead when CONFIG_PPC_KUEP is set.  The repeated
 * numeric label 777 is intentional: without KUEP, "bne 777f" resolves
 * to the final label and user entries skip the whole block.
 * No-op on non-Book3S/32.
 */
.macro prepare_transfer_to_handler
#ifdef CONFIG_PPC_BOOK3S_32
	andi.	r12,r9,MSR_PR
	bne	777f
	bl	prepare_transfer_to_handler	/* C function of the same name */
#ifdef CONFIG_PPC_KUEP
	b	778f
777:
	bl	__kuep_lock
778:
#endif
777:
#endif
.endm
147
/*
 * System-call entry (newer revision): capture SRR0/SRR1 in r12/r9,
 * rfi to local label 1 with MSR_KERNEL (MMU on), switch r1 to the top
 * of the task's kernel stack (old r1 preserved in r11), then save NIP
 * and CCR (with the SO bit cleared) and jump to transfer_to_syscall.
 */
.macro SYSCALL_ENTRY trapno
	mfspr	r9, SPRN_SRR1
	mfspr	r12, SPRN_SRR0
	LOAD_REG_IMMEDIATE(r11, MSR_KERNEL)		/* can take exceptions */
	lis	r10, 1f@h
	ori	r10, r10, 1f@l
	mtspr	SPRN_SRR1, r11
	mtspr	SPRN_SRR0, r10
	mfspr	r10,SPRN_SPRG_THREAD
	mr	r11, r1			/* keep old stack pointer */
	lwz	r1,TASK_STACK-THREAD(r10)
	tovirt(r10, r10)
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
	rfi				/* MMU on, continue at 1: */
1:
	stw	r12,_NIP(r1)
	mfcr	r12
	rlwinm	r12,r12,0,4,2	/* Clear SO bit in CR */
	stw	r12,_CCR(r1)
	b	transfer_to_syscall	/* jump to handler */
.endm
169
170/*
171 * Note: code which follows this uses cr0.eq (set if from kernel),
172 * r11, r12 (SRR0), and r9 (SRR1).
173 *
174 * Note2: once we have set r1 we are in a position to take exceptions
175 * again, and we could thus set MSR:RI at that point.
176 */
177
178/*
179 * Exception vectors.
180 */
/*
 * Place a vector at fixed offset n within __HEAD and open label:.
 * On Book3S the DO_KVM hook is emitted first so KVM can intercept.
 */
#ifdef CONFIG_PPC_BOOK3S
#define	START_EXCEPTION(n, label)		\
	__HEAD;					\
	. = n;					\
	DO_KVM n;				\
label:

#else
#define	START_EXCEPTION(n, label)		\
	__HEAD;					\
	. = n;					\
label:

#endif
195
/*
 * Boilerplate vector (newer revision): full prologue for trap n, then
 * the Book3S pre-handler hook, a direct call to hdlr, and the common
 * interrupt_return path.
 */
#define EXCEPTION(n, label, hdlr)		\
	START_EXCEPTION(n, label)		\
	EXCEPTION_PROLOG n label;		\
	prepare_transfer_to_handler;		\
	bl	hdlr;				\
	b	interrupt_return
202
/*
 * Landing pad for the overflow branch taken in EXCEPTION_PROLOG_1.
 * Runs translation-off, hence the -PAGE_OFFSET relocations: point r1
 * at the per-CPU emergency stack (emergency_ctx, 8-byte stride per
 * the slwi 3), finish the frame via EXCEPTION_PROLOG_2 (trap 0) and
 * call stack_overflow_exception.
 */
.macro vmap_stack_overflow_exception
	__HEAD
vmap_stack_overflow:
#ifdef CONFIG_SMP
	mfspr	r1, SPRN_SPRG_THREAD
	lwz	r1, TASK_CPU - THREAD(r1)
	slwi	r1, r1, 3
	addis	r1, r1, emergency_ctx-PAGE_OFFSET@ha
#else
	lis	r1, emergency_ctx-PAGE_OFFSET@ha
#endif
	lwz	r1, emergency_ctx-PAGE_OFFSET@l(r1)
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
	EXCEPTION_PROLOG_2 0 vmap_stack_overflow
	prepare_transfer_to_handler
	bl	stack_overflow_exception
	b	interrupt_return
.endm
221
222#endif /* __HEAD_32_H__ */