/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * Offset to the saved Status register of the current process; the first
 * 32 bytes of the stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
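/*
 * The layout assumed above: each task's kernel stack is _THREAD_SIZE bytes,
 * with a struct pt_regs placed at the top of the stack below a 32 byte pad,
 * so ST_OFF is the offset from the thread_info base to the Status word
 * (PT_STATUS) saved in those pt_regs.
 */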

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti, s32 fp_save)
 */
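/*
 * resume() is the low-level context switch: it is invoked from the arch
 * switch_to() macro and returns prev in v0 (see the move v0, a0 before the
 * final jr ra) so the caller gets back the task that was switched out.
 */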
        .align  5
        LEAF(resume)
        mfc0    t1, CP0_STATUS
        LONG_S  t1, THREAD_STATUS(a0)
        cpu_save_nonscratch a0
        LONG_S  ra, THREAD_REG31(a0)

        /*
         * Check whether we need to save any FP context.  FP context is
         * saved iff the task has used the scalar FPU or the MSA ASE in the
         * current time slice, as indicated by _TIF_USEDFPU and _TIF_USEDMSA
         * respectively.  switch_to will have set fp_save to the appropriate
         * FP_SAVE_ enum value.
         */
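        /*
         * The fp_save encoding assumed by the branches below: zero means
         * nothing to save, a positive value selects the scalar FP path and
         * a negative value selects the MSA vector path.
         */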
        beqz    a3, 2f

        /*
         * We do.  Clear the saved CU1 bit for prev, such that next time it
         * is scheduled it will start in userland with the FPU disabled.  If
         * the task uses the FPU then it will be enabled again via the do_cpu
         * trap.  This allows us to lazily restore the FP context.
         */
        PTR_L   t3, TASK_THREAD_INFO(a0)
        LONG_L  t0, ST_OFF(t3)
        li      t1, ~ST0_CU1
        and     t0, t0, t1
        LONG_S  t0, ST_OFF(t3)

        /* Check whether we're saving scalar or vector context. */
        bgtz    a3, 1f

        /* Save 128b MSA vector context. */
        msa_save_all    a0
        b       2f

1:      /* Save 32b/64b scalar FP context. */
        fpu_save_double a0 t0 t1        # c0_status passed in t0
                                        # clobbers t1
2:

#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
        PTR_LA  t8, __stack_chk_guard
        LONG_L  t9, TASK_STACK_CANARY(a1)
        LONG_S  t9, 0(t8)
#endif
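        /*
         * With CONFIG_CC_STACKPROTECTOR on a !SMP kernel the canary checked
         * by the compiler is the single global __stack_chk_guard, so it is
         * reloaded above from the incoming task's per-task copy
         * (TASK_STACK_CANARY) on every switch.
         */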

        /*
         * The order in which the registers are restored below takes care of
         * the race between updating $28, $29 and kernelsp without having to
         * disable interrupts.
         */
        move    $28, a2
        cpu_restore_nonscratch a1

        PTR_ADDU        t0, $28, _THREAD_SIZE - 32
        set_saved_sp    t0, t1, t2
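        /*
         * set_saved_sp records t0 (the top of next's kernel stack, minus
         * the 32 byte pad) as the per-CPU kernelsp value that the exception
         * handlers use to find the kernel stack on entry from user mode.
         */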
#ifdef CONFIG_MIPS_MT_SMTC
        /* Read-modify-writes of Status must be atomic on a VPE */
        mfc0    t2, CP0_TCSTATUS
        ori     t1, t2, TCSTATUS_IXMT
        mtc0    t1, CP0_TCSTATUS
        andi    t2, t2, TCSTATUS_IXMT
        _ehb
        DMT     8                               # dmt   t0
        move    t1, ra
        jal     mips_ihb
        move    ra, t1
#endif /* CONFIG_MIPS_MT_SMTC */
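        /*
         * Status handover: keep the live interrupt mask (IM7..IM0, bits
         * 15:8) and IE (bit 0) from the current Status -- hence the 0xff01
         * mask -- and take every other field from next's saved
         * THREAD_STATUS before writing the combined value back to CP0.
         */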
        mfc0    t1, CP0_STATUS          /* Do we really need this? */
        li      a3, 0xff01
        and     t1, a3
        LONG_L  a2, THREAD_STATUS(a1)
        nor     a3, $0, a3
        and     a2, a3
        or      a2, t1
        mtc0    a2, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
        _ehb
        andi    t0, t0, VPECONTROL_TE
        beqz    t0, 1f
        emt
1:
        mfc0    t1, CP0_TCSTATUS
        xori    t1, t1, TCSTATUS_IXMT
        or      t1, t1, t2
        mtc0    t1, CP0_TCSTATUS
        _ehb
#endif /* CONFIG_MIPS_MT_SMTC */
        move    v0, a0
        jr      ra
        END(resume)

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
        mfc0    t0, CP0_STATUS
#endif
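        /*
         * fpu_save_double only needs c0_status (in t0) to test Status.FR,
         * i.e. whether the FPU is in 32- or 16-register (paired) mode,
         * which is why Status is read here only on 64-bit and MIPS32r2
         * kernels.
         */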
        fpu_save_double a0 t0 t1                # clobbers t1
        jr      ra
        END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
        mfc0    t0, CP0_STATUS
#endif
        fpu_restore_double a0 t0 t1             # clobbers t1
        jr      ra
        END(_restore_fp)

#ifdef CONFIG_CPU_HAS_MSA

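/*
 * msa_save_all/msa_restore_all (from asmmacro.h) copy all 32 MSA vector
 * registers to/from the thread's FP context.  Each scalar FP register
 * aliases the low 64 bits of the corresponding vector register, so saving
 * the vector state also covers the scalar state.
 */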
/*
 * Save a thread's MSA vector context.
 */
LEAF(_save_msa)
        msa_save_all    a0
        jr      ra
        END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 */
LEAF(_restore_msa)
        msa_restore_all a0
        jr      ra
        END(_restore_msa)

#endif

/*
 * Load the FPU with signalling NaNs.  The bit pattern we use has the
 * property that it represents a signalling NaN whether it is interpreted
 * as single or as double precision.
 *
 * We initialize fcr31 to rounding to nearest, no exceptions.
 */

#define FPU_DEFAULT 0x00000000

LEAF(_init_fpu)
#ifdef CONFIG_MIPS_MT_SMTC
        /* Rather than manipulate per-VPE Status, set per-TC bit in TCStatus */
        mfc0    t0, CP0_TCSTATUS
        /* Bit position is the same for Status, TCStatus */
        li      t1, ST0_CU1
        or      t0, t1
        mtc0    t0, CP0_TCSTATUS
#else /* Normal MIPS CU1 enable */
        mfc0    t0, CP0_STATUS
        li      t1, ST0_CU1
        or      t0, t1
        mtc0    t0, CP0_STATUS
#endif /* CONFIG_MIPS_MT_SMTC */
        enable_fpu_hazard

        li      t1, FPU_DEFAULT
        ctc1    t1, fcr31

        li      t1, -1                          # SNaN
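        /*
         * All-ones works as the SNaN pattern because, in the legacy MIPS
         * NaN encoding, a NaN with the most significant fraction bit set is
         * a signalling NaN -- and that holds for both the single and the
         * double precision interpretation of the word.
         */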

#ifdef CONFIG_64BIT
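        /*
         * sll by 5 moves Status.FR (bit 26) into the sign bit, so bgez
         * below skips the odd-numbered registers when FR=0, i.e. when the
         * FPU only has 16 (even-numbered) double precision registers.
         */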
        sll     t0, t0, 5
        bgez    t0, 1f                          # 16 / 32 register mode?

        dmtc1   t1, $f1
        dmtc1   t1, $f3
        dmtc1   t1, $f5
        dmtc1   t1, $f7
        dmtc1   t1, $f9
        dmtc1   t1, $f11
        dmtc1   t1, $f13
        dmtc1   t1, $f15
        dmtc1   t1, $f17
        dmtc1   t1, $f19
        dmtc1   t1, $f21
        dmtc1   t1, $f23
        dmtc1   t1, $f25
        dmtc1   t1, $f27
        dmtc1   t1, $f29
        dmtc1   t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
        mtc1    t1, $f0
        mtc1    t1, $f1
        mtc1    t1, $f2
        mtc1    t1, $f3
        mtc1    t1, $f4
        mtc1    t1, $f5
        mtc1    t1, $f6
        mtc1    t1, $f7
        mtc1    t1, $f8
        mtc1    t1, $f9
        mtc1    t1, $f10
        mtc1    t1, $f11
        mtc1    t1, $f12
        mtc1    t1, $f13
        mtc1    t1, $f14
        mtc1    t1, $f15
        mtc1    t1, $f16
        mtc1    t1, $f17
        mtc1    t1, $f18
        mtc1    t1, $f19
        mtc1    t1, $f20
        mtc1    t1, $f21
        mtc1    t1, $f22
        mtc1    t1, $f23
        mtc1    t1, $f24
        mtc1    t1, $f25
        mtc1    t1, $f26
        mtc1    t1, $f27
        mtc1    t1, $f28
        mtc1    t1, $f29
        mtc1    t1, $f30
        mtc1    t1, $f31

#ifdef CONFIG_CPU_MIPS32_R2
        .set    push
        .set    mips64r2
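        /*
         * mthc1 writes the upper 32 bits of a 64-bit FPR and is only
         * meaningful when Status.FR=1 on a MIPS32r2 CPU with a 64-bit FPU;
         * the FR test below skips it otherwise.
         */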
        sll     t0, t0, 5                       # is Status.FR set?
        bgez    t0, 1f                          # no: skip setting upper 32b

        mthc1   t1, $f0
        mthc1   t1, $f1
        mthc1   t1, $f2
        mthc1   t1, $f3
        mthc1   t1, $f4
        mthc1   t1, $f5
        mthc1   t1, $f6
        mthc1   t1, $f7
        mthc1   t1, $f8
        mthc1   t1, $f9
        mthc1   t1, $f10
        mthc1   t1, $f11
        mthc1   t1, $f12
        mthc1   t1, $f13
        mthc1   t1, $f14
        mthc1   t1, $f15
        mthc1   t1, $f16
        mthc1   t1, $f17
        mthc1   t1, $f18
        mthc1   t1, $f19
        mthc1   t1, $f20
        mthc1   t1, $f21
        mthc1   t1, $f22
        mthc1   t1, $f23
        mthc1   t1, $f24
        mthc1   t1, $f25
        mthc1   t1, $f26
        mthc1   t1, $f27
        mthc1   t1, $f28
        mthc1   t1, $f29
        mthc1   t1, $f30
        mthc1   t1, $f31
1:      .set    pop
#endif /* CONFIG_CPU_MIPS32_R2 */
#else
        .set    arch=r4000
        dmtc1   t1, $f0
        dmtc1   t1, $f2
        dmtc1   t1, $f4
        dmtc1   t1, $f6
        dmtc1   t1, $f8
        dmtc1   t1, $f10
        dmtc1   t1, $f12
        dmtc1   t1, $f14
        dmtc1   t1, $f16
        dmtc1   t1, $f18
        dmtc1   t1, $f20
        dmtc1   t1, $f22
        dmtc1   t1, $f24
        dmtc1   t1, $f26
        dmtc1   t1, $f28
        dmtc1   t1, $f30
#endif
        jr      ra
        END(_init_fpu)