Loading...
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4 */
5#ifndef _ASM_STACKFRAME_H
6#define _ASM_STACKFRAME_H
7
8#include <linux/threads.h>
9
10#include <asm/asm.h>
11#include <asm/asmmacro.h>
12#include <asm/asm-offsets.h>
13#include <asm/loongarch.h>
14#include <asm/thread_info.h>
15
16/* Make the addition of cfi info a little easier. */
/*
 * cfi_rel_offset: conditionally emit a .cfi_rel_offset directive noting
 * that \reg has been saved at \offset from the CFA.  All CFI output in
 * this file is gated on the docfi flag so each user can opt in.
 */
17 .macro cfi_rel_offset reg offset=0 docfi=0
18 .if \docfi
19 .cfi_rel_offset \reg, \offset
20 .endif
21 .endm
22
/*
 * cfi_st: store \reg into the frame at sp + \offset (LONG_S is the
 * native-word store), recording the save location in CFI when docfi=1.
 */
23 .macro cfi_st reg offset=0 docfi=0
24 cfi_rel_offset \reg, \offset, \docfi
25 LONG_S \reg, sp, \offset
26 .endm
27
/*
 * cfi_restore: conditionally emit .cfi_restore for \reg.  The \offset
 * argument is accepted for symmetry with cfi_rel_offset but is unused.
 */
28 .macro cfi_restore reg offset=0 docfi=0
29 .if \docfi
30 .cfi_restore \reg
31 .endif
32 .endm
33
/*
 * cfi_ld: reload \reg from sp + \offset and mark it restored in CFI.
 * Counterpart of cfi_st.
 */
34 .macro cfi_ld reg offset=0 docfi=0
35 LONG_L \reg, sp, \offset
36 cfi_restore \reg \offset \docfi
37 .endm
38
/*
 * BACKUP_T0T1: stash t0/t1 in the EXCEPTION_KS0/KS1 kernel-scratch CSRs
 * at exception entry, freeing them as working registers before a full
 * pt_regs frame exists.  The saved values are pulled back out by
 * RELOAD_T0T1 (see SAVE_TEMP) so they can be stored into the frame.
 */
39 .macro BACKUP_T0T1
40 csrwr t0, EXCEPTION_KS0
41 csrwr t1, EXCEPTION_KS1
42 .endm
43
/*
 * RELOAD_T0T1: recover the pre-exception t0/t1 from the scratch CSRs.
 */
44 .macro RELOAD_T0T1
45 csrrd t0, EXCEPTION_KS0
46 csrrd t1, EXCEPTION_KS1
47 .endm
48
/*
 * SAVE_TEMP: save the caller-clobbered temporaries t0-t8 into the
 * pt_regs frame at sp (slots PT_R12..PT_R20).  t0/t1 were used as
 * scratch by the entry code, so their original values are first
 * recovered from the scratch CSRs via RELOAD_T0T1.
 */
49 .macro SAVE_TEMP docfi=0
50 RELOAD_T0T1
51 cfi_st t0, PT_R12, \docfi
52 cfi_st t1, PT_R13, \docfi
53 cfi_st t2, PT_R14, \docfi
54 cfi_st t3, PT_R15, \docfi
55 cfi_st t4, PT_R16, \docfi
56 cfi_st t5, PT_R17, \docfi
57 cfi_st t6, PT_R18, \docfi
58 cfi_st t7, PT_R19, \docfi
59 cfi_st t8, PT_R20, \docfi
60 .endm
61
/*
 * SAVE_STATIC: save the callee-saved registers s0-s8 into the pt_regs
 * frame (slots PT_R23..PT_R31).
 */
62 .macro SAVE_STATIC docfi=0
63 cfi_st s0, PT_R23, \docfi
64 cfi_st s1, PT_R24, \docfi
65 cfi_st s2, PT_R25, \docfi
66 cfi_st s3, PT_R26, \docfi
67 cfi_st s4, PT_R27, \docfi
68 cfi_st s5, PT_R28, \docfi
69 cfi_st s6, PT_R29, \docfi
70 cfi_st s7, PT_R30, \docfi
71 cfi_st s8, PT_R31, \docfi
72 .endm
73
74/*
75 * get_saved_sp returns the SP for the current CPU by looking in the
76 * kernelsp array for it. It stores the current sp in t0 and loads the
77 * new value in sp.
78 */
79 .macro get_saved_sp docfi=0
80 la.abs t1, kernelsp
/*
 * On SMP the per-CPU base is taken from the PERCPU_BASE_KS scratch CSR
 * rather than from u0 -- NOTE(review): at this point u0 presumably still
 * holds the interrupted context's r21 (it is only switched to the
 * per-CPU base later, in SAVE_SOME).
 */
81#ifdef CONFIG_SMP
82 csrrd t0, PERCPU_BASE_KS
83 LONG_ADD t1, t1, t0
84#endif
85 move t0, sp
86 .if \docfi
87 .cfi_register sp, t0
88 .endif
89 LONG_L sp, t1, 0
90 .endm
91
/*
 * set_saved_sp: store \stackp into this CPU's kernelsp slot (indexed by
 * the per-CPU base in u0 under SMP).  \temp is clobbered as scratch;
 * \temp2 is accepted but unused in this version.
 */
92 .macro set_saved_sp stackp temp temp2
93 la.abs \temp, kernelsp
94#ifdef CONFIG_SMP
95 LONG_ADD \temp, \temp, u0
96#endif
97 LONG_S \stackp, \temp, 0
98 .endm
99
/*
 * SAVE_SOME: open a struct pt_regs frame on the kernel stack and save
 * the state every exception/interrupt entry needs: old sp, the entry
 * CSRs (PRMD/CRMD/EUEN/ECFG/ESTAT), ra/ERA, the argument registers
 * a0-a7, tp and fp.  Expects BACKUP_T0T1 to have stashed the original
 * t0/t1 (they are used as scratch here; SAVE_TEMP stores them later).
 * On exit sp points at the new pt_regs.
 */
100 .macro SAVE_SOME docfi=0
/* PRMD.PPLV != 0 => we came from user mode and must switch stacks. */
101 csrrd t1, LOONGARCH_CSR_PRMD
102 andi t1, t1, 0x3 /* extract pplv bit */
103 move t0, sp
104 beqz t1, 8f
105 /* Called from user mode, new stack. */
106 get_saved_sp docfi=\docfi
1078:
108 PTR_ADDI sp, sp, -PT_SIZE
109 .if \docfi
110 .cfi_def_cfa sp, 0
111 .endif
/* t0 still holds the pre-exception sp; record it as the saved R3. */
112 cfi_st t0, PT_R3, \docfi
113 cfi_rel_offset sp, PT_R3, \docfi
114 LONG_S zero, sp, PT_R0
/* Snapshot the entry-time control/status CSRs into the frame. */
115 csrrd t0, LOONGARCH_CSR_PRMD
116 LONG_S t0, sp, PT_PRMD
117 csrrd t0, LOONGARCH_CSR_CRMD
118 LONG_S t0, sp, PT_CRMD
119 csrrd t0, LOONGARCH_CSR_EUEN
120 LONG_S t0, sp, PT_EUEN
121 csrrd t0, LOONGARCH_CSR_ECFG
122 LONG_S t0, sp, PT_ECFG
123 csrrd t0, LOONGARCH_CSR_ESTAT
124 PTR_S t0, sp, PT_ESTAT
125 cfi_st ra, PT_R1, \docfi
126 cfi_st a0, PT_R4, \docfi
127 cfi_st a1, PT_R5, \docfi
128 cfi_st a2, PT_R6, \docfi
129 cfi_st a3, PT_R7, \docfi
130 cfi_st a4, PT_R8, \docfi
131 cfi_st a5, PT_R9, \docfi
132 cfi_st a6, PT_R10, \docfi
133 cfi_st a7, PT_R11, \docfi
/* ra is free now that PT_R1 is saved; use it for the exception PC. */
134 csrrd ra, LOONGARCH_CSR_ERA
135 LONG_S ra, sp, PT_ERA
136 .if \docfi
137 .cfi_rel_offset ra, PT_ERA
138 .endif
139 cfi_st tp, PT_R2, \docfi
140 cfi_st fp, PT_R22, \docfi
141
142 /* Set thread_info if we're coming from user mode */
143 csrrd t0, LOONGARCH_CSR_PRMD
144 andi t0, t0, 0x3 /* extract pplv bit */
145 beqz t0, 9f
146
/* thread_info sits at the base of the kernel stack: mask sp down. */
147 li.d tp, ~_THREAD_MASK
148 and tp, tp, sp
/* Save user r21 and switch u0 to the kernel per-CPU base. */
149 cfi_st u0, PT_R21, \docfi
150 csrrd u0, PERCPU_BASE_KS
1519:
152 .endm
153
/*
 * SAVE_ALL: save the complete register set (SAVE_SOME + temporaries +
 * callee-saved) into one pt_regs frame.
 */
154 .macro SAVE_ALL docfi=0
155 SAVE_SOME \docfi
156 SAVE_TEMP \docfi
157 SAVE_STATIC \docfi
158 .endm
159
/*
 * RESTORE_TEMP: reload the temporaries t0-t8 from the pt_regs frame
 * (slots PT_R12..PT_R20).  Inverse of SAVE_TEMP.
 */
160 .macro RESTORE_TEMP docfi=0
161 cfi_ld t0, PT_R12, \docfi
162 cfi_ld t1, PT_R13, \docfi
163 cfi_ld t2, PT_R14, \docfi
164 cfi_ld t3, PT_R15, \docfi
165 cfi_ld t4, PT_R16, \docfi
166 cfi_ld t5, PT_R17, \docfi
167 cfi_ld t6, PT_R18, \docfi
168 cfi_ld t7, PT_R19, \docfi
169 cfi_ld t8, PT_R20, \docfi
170 .endm
171
/*
 * RESTORE_STATIC: reload the callee-saved registers s0-s8 from the
 * pt_regs frame (slots PT_R23..PT_R31).  Inverse of SAVE_STATIC.
 */
172 .macro RESTORE_STATIC docfi=0
173 cfi_ld s0, PT_R23, \docfi
174 cfi_ld s1, PT_R24, \docfi
175 cfi_ld s2, PT_R25, \docfi
176 cfi_ld s3, PT_R26, \docfi
177 cfi_ld s4, PT_R27, \docfi
178 cfi_ld s5, PT_R28, \docfi
179 cfi_ld s6, PT_R29, \docfi
180 cfi_ld s7, PT_R30, \docfi
181 cfi_ld s8, PT_R31, \docfi
182 .endm
183
/*
 * RESTORE_SOME: inverse of SAVE_SOME.  Re-arms the return CSRs (ERA,
 * PRMD) from the frame and reloads ra, a0-a7, tp and fp.  u0 is only
 * restored when returning to user mode (saved PRMD.PPLV != 0); in
 * kernel context u0 keeps the per-CPU base.  a0 is used as scratch
 * first and reloaded last among the argument registers.
 */
184 .macro RESTORE_SOME docfi=0
185 LONG_L a0, sp, PT_PRMD
186 andi a0, a0, 0x3 /* extract pplv bit */
187 beqz a0, 8f
188 cfi_ld u0, PT_R21, \docfi
1898:
189 ... /* (no code here; label above falls through) */
190 LONG_L a0, sp, PT_ERA
191 csrwr a0, LOONGARCH_CSR_ERA
192 LONG_L a0, sp, PT_PRMD
193 csrwr a0, LOONGARCH_CSR_PRMD
194 cfi_ld ra, PT_R1, \docfi
195 cfi_ld a0, PT_R4, \docfi
196 cfi_ld a1, PT_R5, \docfi
197 cfi_ld a2, PT_R6, \docfi
198 cfi_ld a3, PT_R7, \docfi
199 cfi_ld a4, PT_R8, \docfi
200 cfi_ld a5, PT_R9, \docfi
201 cfi_ld a6, PT_R10, \docfi
202 cfi_ld a7, PT_R11, \docfi
203 cfi_ld tp, PT_R2, \docfi
204 cfi_ld fp, PT_R22, \docfi
205 .endm
206
/*
 * RESTORE_SP_AND_RET: reload the saved stack pointer and return from
 * exception (ertn restores PLV/IE state from PRMD and jumps to ERA).
 */
207 .macro RESTORE_SP_AND_RET docfi=0
208 cfi_ld sp, PT_R3, \docfi
209 ertn
210 .endm
211
/*
 * RESTORE_ALL_AND_RET: full inverse of SAVE_ALL, ending in ertn.
 * Restore order is reversed (static, temp, some) so scratch registers
 * stay usable until the end.
 */
212 .macro RESTORE_ALL_AND_RET docfi=0
213 RESTORE_STATIC \docfi
214 RESTORE_TEMP \docfi
215 RESTORE_SOME \docfi
216 RESTORE_SP_AND_RET \docfi
217 .endm
218
219#endif /* _ASM_STACKFRAME_H */
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4 */
5#ifndef _ASM_STACKFRAME_H
6#define _ASM_STACKFRAME_H
7
8#include <linux/threads.h>
9
10#include <asm/addrspace.h>
11#include <asm/asm.h>
12#include <asm/asmmacro.h>
13#include <asm/asm-offsets.h>
14#include <asm/loongarch.h>
15#include <asm/thread_info.h>
16
17/* Make the addition of cfi info a little easier. */
/*
 * cfi_rel_offset: conditionally emit a .cfi_rel_offset directive noting
 * that \reg has been saved at \offset from the CFA.  All CFI output in
 * this file is gated on the docfi flag so each user can opt in.
 */
18 .macro cfi_rel_offset reg offset=0 docfi=0
19 .if \docfi
20 .cfi_rel_offset \reg, \offset
21 .endif
22 .endm
23
/*
 * cfi_st: store \reg into the frame at sp + \offset (LONG_S is the
 * native-word store), recording the save location in CFI when docfi=1.
 */
24 .macro cfi_st reg offset=0 docfi=0
25 cfi_rel_offset \reg, \offset, \docfi
26 LONG_S \reg, sp, \offset
27 .endm
28
/*
 * cfi_restore: conditionally emit .cfi_restore for \reg.  The \offset
 * argument is accepted for symmetry with cfi_rel_offset but is unused.
 */
29 .macro cfi_restore reg offset=0 docfi=0
30 .if \docfi
31 .cfi_restore \reg
32 .endif
33 .endm
34
/*
 * cfi_ld: reload \reg from sp + \offset and mark it restored in CFI.
 * Counterpart of cfi_st.
 */
35 .macro cfi_ld reg offset=0 docfi=0
36 LONG_L \reg, sp, \offset
37 cfi_restore \reg \offset \docfi
38 .endm
39
40/* Jump to the runtime virtual address. */
/*
 * JUMP_VIRT_ADDR: switch execution from the current (e.g. physical /
 * link-time) PC alias to the cached direct-map window: read the PC
 * (pcaddi \temp2, 0), OR in CACHE_BASE, and branch there.  The 0xc byte
 * offset skips the three instructions of this sequence (pcaddi/or/jirl,
 * 12 bytes), so execution resumes at the instruction following the
 * macro, now at its CACHE_BASE alias.  Clobbers \temp1 and \temp2.
 */
41 .macro JUMP_VIRT_ADDR temp1 temp2
42 li.d \temp1, CACHE_BASE
43 pcaddi \temp2, 0
44 or \temp1, \temp1, \temp2
45 jirl zero, \temp1, 0xc
46 .endm
47
/*
 * BACKUP_T0T1: stash t0/t1 in the EXCEPTION_KS0/KS1 kernel-scratch CSRs
 * at exception entry, freeing them as working registers before a full
 * pt_regs frame exists.  The saved values are recovered by RELOAD_T0T1
 * (see SAVE_TEMP) so they can be stored into the frame.
 */
48 .macro BACKUP_T0T1
49 csrwr t0, EXCEPTION_KS0
50 csrwr t1, EXCEPTION_KS1
51 .endm
52
/*
 * RELOAD_T0T1: recover the pre-exception t0/t1 from the scratch CSRs.
 */
53 .macro RELOAD_T0T1
54 csrrd t0, EXCEPTION_KS0
55 csrrd t1, EXCEPTION_KS1
56 .endm
57
/*
 * SAVE_TEMP: save the caller-clobbered temporaries t0-t8 into the
 * pt_regs frame at sp (slots PT_R12..PT_R20).  t0/t1 were used as
 * scratch by the entry code, so their original values are first
 * recovered from the scratch CSRs via RELOAD_T0T1.
 */
58 .macro SAVE_TEMP docfi=0
59 RELOAD_T0T1
60 cfi_st t0, PT_R12, \docfi
61 cfi_st t1, PT_R13, \docfi
62 cfi_st t2, PT_R14, \docfi
63 cfi_st t3, PT_R15, \docfi
64 cfi_st t4, PT_R16, \docfi
65 cfi_st t5, PT_R17, \docfi
66 cfi_st t6, PT_R18, \docfi
67 cfi_st t7, PT_R19, \docfi
68 cfi_st t8, PT_R20, \docfi
69 .endm
70
/*
 * SAVE_STATIC: save the callee-saved registers s0-s8 into the pt_regs
 * frame (slots PT_R23..PT_R31).
 */
71 .macro SAVE_STATIC docfi=0
72 cfi_st s0, PT_R23, \docfi
73 cfi_st s1, PT_R24, \docfi
74 cfi_st s2, PT_R25, \docfi
75 cfi_st s3, PT_R26, \docfi
76 cfi_st s4, PT_R27, \docfi
77 cfi_st s5, PT_R28, \docfi
78 cfi_st s6, PT_R29, \docfi
79 cfi_st s7, PT_R30, \docfi
80 cfi_st s8, PT_R31, \docfi
81 .endm
82
83/*
84 * get_saved_sp returns the SP for the current CPU by looking in the
85 * kernelsp array for it. It stores the current sp in t0 and loads the
86 * new value in sp.
87 */
88 .macro get_saved_sp docfi=0
/* la_abs: absolute/relocatable address of kernelsp (macro from asm.h). */
89 la_abs t1, kernelsp
/*
 * On SMP the per-CPU base is taken from the PERCPU_BASE_KS scratch CSR
 * rather than from u0 -- NOTE(review): at this point u0 presumably still
 * holds the interrupted context's r21 (it is only switched to the
 * per-CPU base later, in SAVE_SOME).
 */
90#ifdef CONFIG_SMP
91 csrrd t0, PERCPU_BASE_KS
92 LONG_ADD t1, t1, t0
93#endif
94 move t0, sp
95 .if \docfi
96 .cfi_register sp, t0
97 .endif
98 LONG_L sp, t1, 0
99 .endm
100
/*
 * set_saved_sp: store \stackp into this CPU's kernelsp slot (indexed by
 * the per-CPU base in u0 under SMP).  Uses a PC-relative address here,
 * unlike get_saved_sp.  \temp is clobbered; \temp2 is unused.
 */
101 .macro set_saved_sp stackp temp temp2
102 la.pcrel \temp, kernelsp
103#ifdef CONFIG_SMP
104 LONG_ADD \temp, \temp, u0
105#endif
106 LONG_S \stackp, \temp, 0
107 .endm
108
/*
 * SAVE_SOME: open a struct pt_regs frame on the kernel stack and save
 * the state every exception/interrupt entry needs: old sp, the entry
 * CSRs (PRMD/CRMD/EUEN/ECFG/ESTAT), ra/ERA, the argument registers
 * a0-a7, tp and fp.  Expects BACKUP_T0T1 to have stashed the original
 * t0/t1 (they are used as scratch here; SAVE_TEMP stores them later).
 * On exit sp points at the new pt_regs.
 */
109 .macro SAVE_SOME docfi=0
/* PRMD.PPLV != 0 => we came from user mode and must switch stacks. */
110 csrrd t1, LOONGARCH_CSR_PRMD
111 andi t1, t1, 0x3 /* extract pplv bit */
112 move t0, sp
113 beqz t1, 8f
114 /* Called from user mode, new stack. */
115 get_saved_sp docfi=\docfi
1168:
117 PTR_ADDI sp, sp, -PT_SIZE
118 .if \docfi
119 .cfi_def_cfa sp, 0
120 .endif
/* t0 still holds the pre-exception sp; record it as the saved R3. */
121 cfi_st t0, PT_R3, \docfi
122 cfi_rel_offset sp, PT_R3, \docfi
123 LONG_S zero, sp, PT_R0
/* Snapshot the entry-time control/status CSRs into the frame. */
124 csrrd t0, LOONGARCH_CSR_PRMD
125 LONG_S t0, sp, PT_PRMD
126 csrrd t0, LOONGARCH_CSR_CRMD
127 LONG_S t0, sp, PT_CRMD
128 csrrd t0, LOONGARCH_CSR_EUEN
129 LONG_S t0, sp, PT_EUEN
130 csrrd t0, LOONGARCH_CSR_ECFG
131 LONG_S t0, sp, PT_ECFG
132 csrrd t0, LOONGARCH_CSR_ESTAT
133 PTR_S t0, sp, PT_ESTAT
134 cfi_st ra, PT_R1, \docfi
135 cfi_st a0, PT_R4, \docfi
136 cfi_st a1, PT_R5, \docfi
137 cfi_st a2, PT_R6, \docfi
138 cfi_st a3, PT_R7, \docfi
139 cfi_st a4, PT_R8, \docfi
140 cfi_st a5, PT_R9, \docfi
141 cfi_st a6, PT_R10, \docfi
142 cfi_st a7, PT_R11, \docfi
/* ra is free now that PT_R1 is saved; use it for the exception PC. */
143 csrrd ra, LOONGARCH_CSR_ERA
144 LONG_S ra, sp, PT_ERA
145 .if \docfi
146 .cfi_rel_offset ra, PT_ERA
147 .endif
148 cfi_st tp, PT_R2, \docfi
149 cfi_st fp, PT_R22, \docfi
150
151 /* Set thread_info if we're coming from user mode */
152 csrrd t0, LOONGARCH_CSR_PRMD
153 andi t0, t0, 0x3 /* extract pplv bit */
154 beqz t0, 9f
155
/* thread_info sits at the base of the kernel stack: mask sp down. */
156 li.d tp, ~_THREAD_MASK
157 and tp, tp, sp
/* Save user r21 and switch u0 to the kernel per-CPU base. */
158 cfi_st u0, PT_R21, \docfi
159 csrrd u0, PERCPU_BASE_KS
1609:
/*
 * Set CRMD.WE (csrxchg with identical value and mask sets the bit) --
 * NOTE(review): presumably re-enables hardware watchpoints for kernel
 * context so KGDB breakpoints fire; confirm against KGDB entry code.
 */
161#ifdef CONFIG_KGDB
162 li.w t0, CSR_CRMD_WE
163 csrxchg t0, t0, LOONGARCH_CSR_CRMD
164#endif
165 .endm
166
/*
 * SAVE_ALL: save the complete register set (SAVE_SOME + temporaries +
 * callee-saved) into one pt_regs frame.
 */
167 .macro SAVE_ALL docfi=0
168 SAVE_SOME \docfi
169 SAVE_TEMP \docfi
170 SAVE_STATIC \docfi
171 .endm
172
/*
 * RESTORE_TEMP: reload the temporaries t0-t8 from the pt_regs frame
 * (slots PT_R12..PT_R20).  Inverse of SAVE_TEMP.
 */
173 .macro RESTORE_TEMP docfi=0
174 cfi_ld t0, PT_R12, \docfi
175 cfi_ld t1, PT_R13, \docfi
176 cfi_ld t2, PT_R14, \docfi
177 cfi_ld t3, PT_R15, \docfi
178 cfi_ld t4, PT_R16, \docfi
179 cfi_ld t5, PT_R17, \docfi
180 cfi_ld t6, PT_R18, \docfi
181 cfi_ld t7, PT_R19, \docfi
182 cfi_ld t8, PT_R20, \docfi
183 .endm
184
/*
 * RESTORE_STATIC: reload the callee-saved registers s0-s8 from the
 * pt_regs frame (slots PT_R23..PT_R31).  Inverse of SAVE_STATIC.
 */
185 .macro RESTORE_STATIC docfi=0
186 cfi_ld s0, PT_R23, \docfi
187 cfi_ld s1, PT_R24, \docfi
188 cfi_ld s2, PT_R25, \docfi
189 cfi_ld s3, PT_R26, \docfi
190 cfi_ld s4, PT_R27, \docfi
191 cfi_ld s5, PT_R28, \docfi
192 cfi_ld s6, PT_R29, \docfi
193 cfi_ld s7, PT_R30, \docfi
194 cfi_ld s8, PT_R31, \docfi
195 .endm
196
/*
 * RESTORE_SOME: inverse of SAVE_SOME.  Re-arms the return CSRs (ERA,
 * PRMD) from the frame and reloads ra, a0-a7, tp and fp.  u0 is only
 * restored when returning to user mode (saved PRMD.PPLV != 0); in
 * kernel context u0 keeps the per-CPU base.  a0 is used as scratch
 * first and reloaded last among the argument registers.
 */
197 .macro RESTORE_SOME docfi=0
198 LONG_L a0, sp, PT_PRMD
199 andi a0, a0, 0x3 /* extract pplv bit */
200 beqz a0, 8f
201 cfi_ld u0, PT_R21, \docfi
2028:
203 LONG_L a0, sp, PT_ERA
204 csrwr a0, LOONGARCH_CSR_ERA
205 LONG_L a0, sp, PT_PRMD
206 csrwr a0, LOONGARCH_CSR_PRMD
207 cfi_ld ra, PT_R1, \docfi
208 cfi_ld a0, PT_R4, \docfi
209 cfi_ld a1, PT_R5, \docfi
210 cfi_ld a2, PT_R6, \docfi
211 cfi_ld a3, PT_R7, \docfi
212 cfi_ld a4, PT_R8, \docfi
213 cfi_ld a5, PT_R9, \docfi
214 cfi_ld a6, PT_R10, \docfi
215 cfi_ld a7, PT_R11, \docfi
216 cfi_ld tp, PT_R2, \docfi
217 cfi_ld fp, PT_R22, \docfi
218 .endm
219
/*
 * RESTORE_SP_AND_RET: reload the saved stack pointer and return from
 * exception (ertn restores PLV/IE state from PRMD and jumps to ERA).
 */
220 .macro RESTORE_SP_AND_RET docfi=0
221 cfi_ld sp, PT_R3, \docfi
222 ertn
223 .endm
224
/*
 * RESTORE_ALL_AND_RET: full inverse of SAVE_ALL, ending in ertn.
 * Restore order is reversed (static, temp, some) so scratch registers
 * stay usable until the end.
 */
225 .macro RESTORE_ALL_AND_RET docfi=0
226 RESTORE_STATIC \docfi
227 RESTORE_TEMP \docfi
228 RESTORE_SOME \docfi
229 RESTORE_SP_AND_RET \docfi
230 .endm
231
232#endif /* _ASM_STACKFRAME_H */