v6.2
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *		       struct thread_info *next_ti)
 */
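/*
 * Per the MIPS calling convention: a0 = prev, a1 = next, a2 = next_ti;
 * prev is handed back in v0 (see "move v0, a0" before the return).
 */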
	.align	5
	LEAF(resume)
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

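	/*
	 * With CONFIG_SMP the global __stack_chk_guard cannot track a
	 * per-task canary, so this hand-over of the next task's canary
	 * is only done on uniprocessor configurations.
	 */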
#if defined(CONFIG_STACKPROTECTOR) && !defined(CONFIG_SMP)
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order in which the registers are restored takes care of
	 * the race when updating $28, $29 and kernelsp without
	 * disabling interrupts.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
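	/*
	 * 0xff01 covers the interrupt mask bits IM7..IM0 (15:8) plus IE
	 * (bit 0) of CP0 Status: keep those from the running CPU state
	 * and take every other field from next's saved Status.
	 */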
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
	move	v0, a0
	jr	ra
	END(resume)
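For context, resume() is the tail end of a MIPS context switch: the
scheduler's switch_to() macro calls it and receives the previous task back
in v0. A minimal sketch of that caller, assuming the usual wiring in
asm/switch_to.h (the real macro also juggles FPU, DSP and watchpoint state,
omitted here):

	/* sketch only, not the literal asm/switch_to.h macro */
	#define switch_to(prev, next, last)				\
	do {								\
		(last) = resume(prev, next, task_thread_info(next));	\
	} while (0)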
v4.10.11
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/* the preprocessor would otherwise replace the fp in ".set fp=64" with $30 */
#undef fp

/*
 * Offset to the current process status flags, the first 32 bytes of the
 * stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
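ST_OFF points at the saved CP0_STATUS slot inside the pt_regs frame that
sits just below the unused top 32 bytes of the kernel stack. The same
address computed in C, as a sketch (task_pt_regs_sketch is a hypothetical
illustration, not a kernel API):

	/* hypothetical illustration of the ST_OFF arithmetic */
	struct pt_regs *task_pt_regs_sketch(struct thread_info *ti)
	{
		unsigned long top = (unsigned long)ti + THREAD_SIZE - 32;
		return (struct pt_regs *)top - 1;  /* its cp0_status field sits at ST_OFF */
	}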

#ifndef USE_ALTERNATE_RESUME_IMPL
/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *		       struct thread_info *next_ti)
 */
	.align	5
	LEAF(resume)
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order in which the registers are restored takes care of
	 * the race when updating $28, $29 and kernelsp without
	 * disabling interrupts.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
	move	v0, a0
	jr	ra
	END(resume)

#endif /* USE_ALTERNATE_RESUME_IMPL */

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)
	mfc0	t0, CP0_STATUS
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)
	mfc0	t0, CP0_STATUS
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)
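These two helpers back the lazy FPU discipline: a task's live FP registers
are spilled only when it loses the FPU, and reloaded only when it next
faults on an FP instruction. A hedged sketch of the pattern (simplified
from the logic in asm/fpu.h and the exception path, not literal code):

	/* sketch of lazy FP context handling */
	if (is_fpu_owner())
		_save_fp(current);	/* spill live FP state to the thread struct */
	/* ... later, on the task's next FP use ... */
	_restore_fp(current);		/* reload the saved context */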

#ifdef CONFIG_CPU_HAS_MSA

/*
 * Save a thread's MSA vector context.
 */
LEAF(_save_msa)
	msa_save_all	a0
	jr	ra
	END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 */
LEAF(_restore_msa)
	msa_restore_all	a0
	jr	ra
	END(_restore_msa)

LEAF(_init_msa_upper)
	msa_init_all_upper
	jr	ra
	END(_init_msa_upper)

#endif
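The MSA vector registers overlay the FP registers (each 128-bit $w register
extends the corresponding $f register), which is why _init_msa_upper only
has to seed the upper halves when the lower 64 bits already hold live FP
state. Callers typically reach these helpers through thin guards, sketched
here on the assumption that they mirror the asm/msa.h wrappers:

	/* sketch of a cpu_has_msa guard, assuming asm/msa.h-style wrappers */
	static inline void save_msa(struct task_struct *t)
	{
		if (cpu_has_msa)
			_save_msa(t);
	}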

/*
 * Load the FPU with signalling NaNs.  The bit pattern used has the
 * property that it represents a signalling NaN whether it is interpreted
 * as single or as double precision: in the legacy MIPS NaN encoding a set
 * mantissa MSB marks a NaN as signalling, so the all-ones word written
 * below qualifies in both formats.
 *
 * The value to initialize fcr31 to comes in $a0.
 */

	.set push
	SET_HARDFLOAT

LEAF(_init_fpu)
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS
	enable_fpu_hazard

	ctc1	a0, fcr31

	li	t1, -1				# SNaN

#ifdef CONFIG_64BIT
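	/*
	 * Status.FR is bit 26; shifting it up by 5 parks it in the sign
	 * bit, so bgez (sign clear, FR=0) skips the odd-numbered
	 * registers that only exist in 32-register mode.
	 */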
	sll	t0, t0, 5
	bgez	t0, 1f				# 16 / 32 register mode?

	dmtc1	t1, $f1
	dmtc1	t1, $f3
	dmtc1	t1, $f5
	dmtc1	t1, $f7
	dmtc1	t1, $f9
	dmtc1	t1, $f11
	dmtc1	t1, $f13
	dmtc1	t1, $f15
	dmtc1	t1, $f17
	dmtc1	t1, $f19
	dmtc1	t1, $f21
	dmtc1	t1, $f23
	dmtc1	t1, $f25
	dmtc1	t1, $f27
	dmtc1	t1, $f29
	dmtc1	t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
	mtc1	t1, $f0
	mtc1	t1, $f1
	mtc1	t1, $f2
	mtc1	t1, $f3
	mtc1	t1, $f4
	mtc1	t1, $f5
	mtc1	t1, $f6
	mtc1	t1, $f7
	mtc1	t1, $f8
	mtc1	t1, $f9
	mtc1	t1, $f10
	mtc1	t1, $f11
	mtc1	t1, $f12
	mtc1	t1, $f13
	mtc1	t1, $f14
	mtc1	t1, $f15
	mtc1	t1, $f16
	mtc1	t1, $f17
	mtc1	t1, $f18
	mtc1	t1, $f19
	mtc1	t1, $f20
	mtc1	t1, $f21
	mtc1	t1, $f22
	mtc1	t1, $f23
	mtc1	t1, $f24
	mtc1	t1, $f25
	mtc1	t1, $f26
	mtc1	t1, $f27
	mtc1	t1, $f28
	mtc1	t1, $f29
	mtc1	t1, $f30
	mtc1	t1, $f31

#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS32_R6)
	.set    push
	.set    MIPS_ISA_LEVEL_RAW
	.set	fp=64
	sll     t0, t0, 5			# is Status.FR set?
	bgez    t0, 1f				# no: skip setting upper 32b

	mthc1   t1, $f0
	mthc1   t1, $f1
	mthc1   t1, $f2
	mthc1   t1, $f3
	mthc1   t1, $f4
	mthc1   t1, $f5
	mthc1   t1, $f6
	mthc1   t1, $f7
	mthc1   t1, $f8
	mthc1   t1, $f9
	mthc1   t1, $f10
	mthc1   t1, $f11
	mthc1   t1, $f12
	mthc1   t1, $f13
	mthc1   t1, $f14
	mthc1   t1, $f15
	mthc1   t1, $f16
	mthc1   t1, $f17
	mthc1   t1, $f18
	mthc1   t1, $f19
	mthc1   t1, $f20
	mthc1   t1, $f21
	mthc1   t1, $f22
	mthc1   t1, $f23
	mthc1   t1, $f24
	mthc1   t1, $f25
	mthc1   t1, $f26
	mthc1   t1, $f27
	mthc1   t1, $f28
	mthc1   t1, $f29
	mthc1   t1, $f30
	mthc1   t1, $f31
1:	.set    pop
#endif /* CONFIG_CPU_MIPS32_R2 || CONFIG_CPU_MIPS32_R6 */
#else
	.set	MIPS_ISA_ARCH_LEVEL_RAW
	dmtc1	t1, $f0
	dmtc1	t1, $f2
	dmtc1	t1, $f4
	dmtc1	t1, $f6
	dmtc1	t1, $f8
	dmtc1	t1, $f10
	dmtc1	t1, $f12
	dmtc1	t1, $f14
	dmtc1	t1, $f16
	dmtc1	t1, $f18
	dmtc1	t1, $f20
	dmtc1	t1, $f22
	dmtc1	t1, $f24
	dmtc1	t1, $f26
	dmtc1	t1, $f28
	dmtc1	t1, $f30
#endif
	jr	ra
	END(_init_fpu)

	.set pop	/* SET_HARDFLOAT */
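
When a task touches the FPU for the first time, the coprocessor-unusable
exception path enables CU1 and seeds the register file through _init_fpu()
rather than restoring context that does not exist yet. A hedged sketch of
that decision (the predicate name is hypothetical; the real logic lives in
asm/fpu.h and arch/mips/kernel/traps.c):

	/* sketch of the first-use path, not literal kernel code */
	if (!task_has_fp_context(current))		/* hypothetical predicate */
		_init_fpu(current->thread.fpu.fcr31);	/* fill regs with sNaNs */
	else
		_restore_fp(current);			/* reload saved FP state */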