v6.2
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *		       struct thread_info *next_ti)
 */
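/*
 * Per the MIPS calling convention, prev arrives in a0, next in a1 and
 * next_ti in a2; the previous task is returned in v0 (the "move v0, a0"
 * before the final jr).
 */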
	.align	5
	LEAF(resume)
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

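/*
 * With CONFIG_STACKPROTECTOR on a UP kernel the single global
 * __stack_chk_guard is switched to the next task's canary here; on SMP
 * the guard is shared by all CPUs, so it is not switched per task.
 */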
#if defined(CONFIG_STACKPROTECTOR) && !defined(CONFIG_SMP)
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order of restoring the registers takes care of the race
	 * updating $28, $29 and kernelsp without disabling ints.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

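	/*
	 * kernelsp is pointed at the top of next's kernel stack (less the
	 * reserved 32 bytes) so that exception entry from user mode picks
	 * up the right kernel stack.
	 */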
	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
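	/*
	 * Merge the Status just read with next's saved Status: keep the
	 * live interrupt mask and IE bits (0xff01 = ST0_IM | ST0_IE) and
	 * take everything else from THREAD_STATUS(next).
	 */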
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
	move	v0, a0
	jr	ra
	END(resume)
v3.1
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * Offset to the current process status flags; the first 32 bytes of the
 * stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
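/*
 * That is, the cp0_status slot of the pt_regs frame saved at the top of
 * the kernel stack, addressed relative to the task's thread_info.
 */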

/*
 * FPU context is saved iff the process has used its FPU in the current
 * time slice as indicated by _TIF_USEDFPU.  In any case, the CU1 bit for
 * the user-space STATUS register should be 0, so that a process *always*
 * starts its userland with the FPU disabled after each context switch.
 *
 * The FPU will be enabled as soon as the process accesses the FPU again,
 * through the do_cpu() trap.
 */

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti)
 */
	.align	5
	LEAF(resume)
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

	/*
	 * check if we need to save FPU registers
	 */
	PTR_L	t3, TASK_THREAD_INFO(a0)
	LONG_L	t0, TI_FLAGS(t3)
	li	t1, _TIF_USEDFPU
	and	t2, t0, t1
	beqz	t2, 1f
	nor	t1, zero, t1
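	# Fall through: the FPU was used.  t1 now holds ~_TIF_USEDFPU and is
	# used below to clear the flag in prev's thread_info.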

	and	t0, t0, t1
	LONG_S	t0, TI_FLAGS(t3)

	/*
	 * clear saved user stack CU1 bit
	 */
	LONG_L	t0, ST_OFF(t3)
	li	t1, ~ST0_CU1
	and	t0, t0, t1
	LONG_S	t0, ST_OFF(t3)

	fpu_save_double a0 t0 t1		# c0_status passed in t0
						# clobbers t1
1:

	/*
	 * The order of restoring the registers takes care of the race
	 * updating $28, $29 and kernelsp without disabling ints.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2
#ifdef CONFIG_MIPS_MT_SMTC
	/* Read-modify-writes of Status must be atomic on a VPE */
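	/*
	 * Setting IXMT masks interrupts for this TC and DMT halts the other
	 * TCs while Status is rewritten.  t2 remembers the TC's original
	 * IXMT bit and t0 the previous VPEControl value; both are used to
	 * restore the previous state after the mtc0 below.
	 */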
	mfc0	t2, CP0_TCSTATUS
	ori	t1, t2, TCSTATUS_IXMT
	mtc0	t1, CP0_TCSTATUS
	andi	t2, t2, TCSTATUS_IXMT
	_ehb
	DMT	8				# dmt	t0
	move	t1, ra
	jal	mips_ihb
	move	ra, t1
#endif /* CONFIG_MIPS_MT_SMTC */
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
	_ehb
	andi	t0, t0, VPECONTROL_TE
	beqz	t0, 1f
	emt
1:
	mfc0	t1, CP0_TCSTATUS
	xori	t1, t1, TCSTATUS_IXMT
	or	t1, t1, t2
	mtc0	t1, CP0_TCSTATUS
	_ehb
#endif /* CONFIG_MIPS_MT_SMTC */
	move	v0, a0
	jr	ra
	END(resume)

/*
 * Save a thread's fp context.
 */
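/*
 * On 64-bit kernels c0_status is read first so that fpu_save_double can
 * check Status.FR and pick between the 16- and 32-register FPU layouts;
 * the same applies to _restore_fp below.
 */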
LEAF(_save_fp)
#ifdef CONFIG_64BIT
	mfc0	t0, CP0_STATUS
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#ifdef CONFIG_64BIT
	mfc0	t0, CP0_STATUS
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)

/*
 * Load the FPU with signaling NaNs.  The bit pattern we use has the
 * property that it represents a signaling NaN no matter whether it is
 * interpreted as single or as double precision.
 *
 * We initialize fcr31 to rounding to nearest, no exceptions.
 */

#define FPU_DEFAULT  0x00000000

LEAF(_init_fpu)
#ifdef CONFIG_MIPS_MT_SMTC
	/* Rather than manipulate per-VPE Status, set per-TC bit in TCStatus */
	mfc0	t0, CP0_TCSTATUS
	/* Bit position is the same for Status, TCStatus */
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_TCSTATUS
#else /* Normal MIPS CU1 enable */
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS
#endif /* CONFIG_MIPS_MT_SMTC */
	enable_fpu_hazard

	li	t1, FPU_DEFAULT
	ctc1	t1, fcr31

	li	t1, -1				# SNaN
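	# All-ones has an all-ones exponent and the top significand bit set,
	# which the legacy MIPS NaN encoding treats as a signaling NaN in
	# both single and double precision.  The sll/bgez pair below moves
	# Status.FR into the sign bit to test for the 32-register FPU mode.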

#ifdef CONFIG_64BIT
	sll	t0, t0, 5
	bgez	t0, 1f				# 16 / 32 register mode?

	dmtc1	t1, $f1
	dmtc1	t1, $f3
	dmtc1	t1, $f5
	dmtc1	t1, $f7
	dmtc1	t1, $f9
	dmtc1	t1, $f11
	dmtc1	t1, $f13
	dmtc1	t1, $f15
	dmtc1	t1, $f17
	dmtc1	t1, $f19
	dmtc1	t1, $f21
	dmtc1	t1, $f23
	dmtc1	t1, $f25
	dmtc1	t1, $f27
	dmtc1	t1, $f29
	dmtc1	t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
	mtc1	t1, $f0
	mtc1	t1, $f1
	mtc1	t1, $f2
	mtc1	t1, $f3
	mtc1	t1, $f4
	mtc1	t1, $f5
	mtc1	t1, $f6
	mtc1	t1, $f7
	mtc1	t1, $f8
	mtc1	t1, $f9
	mtc1	t1, $f10
	mtc1	t1, $f11
	mtc1	t1, $f12
	mtc1	t1, $f13
	mtc1	t1, $f14
	mtc1	t1, $f15
	mtc1	t1, $f16
	mtc1	t1, $f17
	mtc1	t1, $f18
	mtc1	t1, $f19
	mtc1	t1, $f20
	mtc1	t1, $f21
	mtc1	t1, $f22
	mtc1	t1, $f23
	mtc1	t1, $f24
	mtc1	t1, $f25
	mtc1	t1, $f26
	mtc1	t1, $f27
	mtc1	t1, $f28
	mtc1	t1, $f29
	mtc1	t1, $f30
	mtc1	t1, $f31
#else
	.set	mips3
	dmtc1	t1, $f0
	dmtc1	t1, $f2
	dmtc1	t1, $f4
	dmtc1	t1, $f6
	dmtc1	t1, $f8
	dmtc1	t1, $f10
	dmtc1	t1, $f12
	dmtc1	t1, $f14
	dmtc1	t1, $f16
	dmtc1	t1, $f18
	dmtc1	t1, $f20
	dmtc1	t1, $f22
	dmtc1	t1, $f24
	dmtc1	t1, $f26
	dmtc1	t1, $f28
	dmtc1	t1, $f30
#endif
	jr	ra
	END(_init_fpu)