/*
 * Linux Audio
 * Check our new training course
 * Loading...
 * v4.6
 */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
 15#include <asm/asm.h>
 16#include <asm/asmmacro.h>
 17#include <asm/errno.h>
 18#include <asm/fpregdef.h>
 19#include <asm/mipsregs.h>
 20#include <asm/asm-offsets.h>
 21#include <asm/regdef.h>
 22
 23/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
 24#undef fp
 25
 26	.macro	EX insn, reg, src
 27	.set	push
 28	SET_HARDFLOAT
 29	.set	nomacro
 30.ex\@:	\insn	\reg, \src
 31	.set	pop
 32	.section __ex_table,"a"
 33	PTR	.ex\@, fault
 34	.previous
 35	.endm
 36
 37	.set	noreorder
 
 38
 39/**
 40 * _save_fp_context() - save FP context from the FPU
 41 * @a0 - pointer to fpregs field of sigcontext
 42 * @a1 - pointer to fpc_csr field of sigcontext
 43 *
 44 * Save FP context, including the 32 FP data registers and the FP
 45 * control & status register, from the FPU to signal context.
 46 */
 47LEAF(_save_fp_context)
 48	.set	push
 49	SET_HARDFLOAT
 50	cfc1	t1, fcr31
 51	.set	pop
 52
 53#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
 54		defined(CONFIG_CPU_MIPS32_R6)
 55	.set	push
 56	SET_HARDFLOAT
 57#ifdef CONFIG_CPU_MIPS32_R2
 58	.set	mips32r2
 59	.set	fp=64
 60	mfc0	t0, CP0_STATUS
 61	sll	t0, t0, 5
 62	bgez	t0, 1f			# skip storing odd if FR=0
 63	 nop
 64#endif
 65	/* Store the 16 odd double precision registers */
 66	EX	sdc1 $f1, 8(a0)
 67	EX	sdc1 $f3, 24(a0)
 68	EX	sdc1 $f5, 40(a0)
 69	EX	sdc1 $f7, 56(a0)
 70	EX	sdc1 $f9, 72(a0)
 71	EX	sdc1 $f11, 88(a0)
 72	EX	sdc1 $f13, 104(a0)
 73	EX	sdc1 $f15, 120(a0)
 74	EX	sdc1 $f17, 136(a0)
 75	EX	sdc1 $f19, 152(a0)
 76	EX	sdc1 $f21, 168(a0)
 77	EX	sdc1 $f23, 184(a0)
 78	EX	sdc1 $f25, 200(a0)
 79	EX	sdc1 $f27, 216(a0)
 80	EX	sdc1 $f29, 232(a0)
 81	EX	sdc1 $f31, 248(a0)
 821:	.set	pop
 83#endif
 84
 85	.set push
 86	SET_HARDFLOAT
 87	/* Store the 16 even double precision registers */
 88	EX	sdc1 $f0, 0(a0)
 89	EX	sdc1 $f2, 16(a0)
 90	EX	sdc1 $f4, 32(a0)
 91	EX	sdc1 $f6, 48(a0)
 92	EX	sdc1 $f8, 64(a0)
 93	EX	sdc1 $f10, 80(a0)
 94	EX	sdc1 $f12, 96(a0)
 95	EX	sdc1 $f14, 112(a0)
 96	EX	sdc1 $f16, 128(a0)
 97	EX	sdc1 $f18, 144(a0)
 98	EX	sdc1 $f20, 160(a0)
 99	EX	sdc1 $f22, 176(a0)
100	EX	sdc1 $f24, 192(a0)
101	EX	sdc1 $f26, 208(a0)
102	EX	sdc1 $f28, 224(a0)
103	EX	sdc1 $f30, 240(a0)
104	EX	sw t1, 0(a1)
105	jr	ra
106	 li	v0, 0					# success
107	.set pop
108	END(_save_fp_context)
109
/**
 * _restore_fp_context() - restore FP context to the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Restore FP context, including the 32 FP data registers and the FP
 * control & status register, from signal context to the FPU.
 * Returns 0 in v0 on success; a faulting user access returns -EFAULT
 * via the __ex_table entries emitted by EX.
 */
LEAF(_restore_fp_context)
	EX	lw t1, 0(a1)			# t1 = saved FCSR value

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)  || \
		defined(CONFIG_CPU_MIPS32_R6)
	.set	push
	SET_HARDFLOAT
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips32r2
	.set	fp=64
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5			# move Status.FR to sign bit
	bgez	t0, 1f			# skip loading odd if FR=0
	 nop
#endif
	/* Load the 16 odd double precision registers */
	EX	ldc1 $f1, 8(a0)
	EX	ldc1 $f3, 24(a0)
	EX	ldc1 $f5, 40(a0)
	EX	ldc1 $f7, 56(a0)
	EX	ldc1 $f9, 72(a0)
	EX	ldc1 $f11, 88(a0)
	EX	ldc1 $f13, 104(a0)
	EX	ldc1 $f15, 120(a0)
	EX	ldc1 $f17, 136(a0)
	EX	ldc1 $f19, 152(a0)
	EX	ldc1 $f21, 168(a0)
	EX	ldc1 $f23, 184(a0)
	EX	ldc1 $f25, 200(a0)
	EX	ldc1 $f27, 216(a0)
	EX	ldc1 $f29, 232(a0)
	EX	ldc1 $f31, 248(a0)
1:	.set pop
#endif
	.set push
	SET_HARDFLOAT
	/* Load the 16 even double precision registers */
	EX	ldc1 $f0, 0(a0)
	EX	ldc1 $f2, 16(a0)
	EX	ldc1 $f4, 32(a0)
	EX	ldc1 $f6, 48(a0)
	EX	ldc1 $f8, 64(a0)
	EX	ldc1 $f10, 80(a0)
	EX	ldc1 $f12, 96(a0)
	EX	ldc1 $f14, 112(a0)
	EX	ldc1 $f16, 128(a0)
	EX	ldc1 $f18, 144(a0)
	EX	ldc1 $f20, 160(a0)
	EX	ldc1 $f22, 176(a0)
	EX	ldc1 $f24, 192(a0)
	EX	ldc1 $f26, 208(a0)
	EX	ldc1 $f28, 224(a0)
	EX	ldc1 $f30, 240(a0)
	ctc1	t1, fcr31			# restore FCSR
	.set pop
	jr	ra
	 li	v0, 0					# success
	END(_restore_fp_context)
174
#ifdef CONFIG_CPU_HAS_MSA

/*
 * op_one_wr - emit one 16-byte-aligned stub that applies \op to MSA
 * vector register \idx at offset 0 off \base, then returns.  The
 * stubs are indexed by op_msa_wr below as a jump table, so each must
 * occupy exactly one 16-byte slot (hence .align 4).
 */
	.macro	op_one_wr	op, idx, base
	.align	4
\idx:	\op	\idx, 0, \base
	jr	ra
	 nop
	.endm
183
/*
 * op_msa_wr - build a leaf function \name that applies \op to one MSA
 * vector register selected at run time.
 * In:  a0 = vector register index (0-31)
 *      a1 = memory address passed as the base operand of \op
 * The function computes a0 * 16 past label 0f (the first stub) and
 * jumps into the table of op_one_wr stubs; the selected stub returns
 * directly to the caller.
 */
	.macro	op_msa_wr	name, op
LEAF(\name)
	.set		push
	.set		noreorder
	sll		t0, a0, 4		# t0 = index * 16 (stub size)
	PTR_LA		t1, 0f			# t1 = base of stub table
	PTR_ADDU	t0, t0, t1
	jr		t0			# dispatch to stub for a0
	  nop
	op_one_wr	\op, 0, a1
	op_one_wr	\op, 1, a1
	op_one_wr	\op, 2, a1
	op_one_wr	\op, 3, a1
	op_one_wr	\op, 4, a1
	op_one_wr	\op, 5, a1
	op_one_wr	\op, 6, a1
	op_one_wr	\op, 7, a1
	op_one_wr	\op, 8, a1
	op_one_wr	\op, 9, a1
	op_one_wr	\op, 10, a1
	op_one_wr	\op, 11, a1
	op_one_wr	\op, 12, a1
	op_one_wr	\op, 13, a1
	op_one_wr	\op, 14, a1
	op_one_wr	\op, 15, a1
	op_one_wr	\op, 16, a1
	op_one_wr	\op, 17, a1
	op_one_wr	\op, 18, a1
	op_one_wr	\op, 19, a1
	op_one_wr	\op, 20, a1
	op_one_wr	\op, 21, a1
	op_one_wr	\op, 22, a1
	op_one_wr	\op, 23, a1
	op_one_wr	\op, 24, a1
	op_one_wr	\op, 25, a1
	op_one_wr	\op, 26, a1
	op_one_wr	\op, 27, a1
	op_one_wr	\op, 28, a1
	op_one_wr	\op, 29, a1
	op_one_wr	\op, 30, a1
	op_one_wr	\op, 31, a1
	.set		pop
	END(\name)
	.endm
228
229	op_msa_wr	read_msa_wr_b, st_b
230	op_msa_wr	read_msa_wr_h, st_h
231	op_msa_wr	read_msa_wr_w, st_w
232	op_msa_wr	read_msa_wr_d, st_d
233
234	op_msa_wr	write_msa_wr_b, ld_b
235	op_msa_wr	write_msa_wr_h, ld_h
236	op_msa_wr	write_msa_wr_w, ld_w
237	op_msa_wr	write_msa_wr_d, ld_d
238
239#endif /* CONFIG_CPU_HAS_MSA */
240
241#ifdef CONFIG_CPU_HAS_MSA
242
/*
 * save_msa_upper - store the upper 64 bits of MSA vector register \wr
 * at \off(\base), staging through $1 (hence .set noat).  On 32-bit
 * kernels the value is moved out as two 32-bit words, written in
 * memory byte order.
 */
	.macro	save_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	copy_u_d \wr, 1
	EX sd	$1, \off(\base)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	copy_u_w \wr, 2
	EX sw	$1, \off(\base)
	copy_u_w \wr, 3
	EX sw	$1, (\off+4)(\base)
#else /* CONFIG_CPU_BIG_ENDIAN */
	copy_u_w \wr, 2
	EX sw	$1, (\off+4)(\base)
	copy_u_w \wr, 3
	EX sw	$1, \off(\base)
#endif
	.set	pop
	.endm
262
/*
 * _save_msa_all_upper() - store the upper 64 bits of all 32 MSA
 * vector registers to the buffer at a0 (8 bytes per register).
 * Returns 0 in v0 on success; a faulting access returns -EFAULT via
 * the __ex_table entries emitted by EX.
 */
LEAF(_save_msa_all_upper)
	save_msa_upper	0, 0x00, a0
	save_msa_upper	1, 0x08, a0
	save_msa_upper	2, 0x10, a0
	save_msa_upper	3, 0x18, a0
	save_msa_upper	4, 0x20, a0
	save_msa_upper	5, 0x28, a0
	save_msa_upper	6, 0x30, a0
	save_msa_upper	7, 0x38, a0
	save_msa_upper	8, 0x40, a0
	save_msa_upper	9, 0x48, a0
	save_msa_upper	10, 0x50, a0
	save_msa_upper	11, 0x58, a0
	save_msa_upper	12, 0x60, a0
	save_msa_upper	13, 0x68, a0
	save_msa_upper	14, 0x70, a0
	save_msa_upper	15, 0x78, a0
	save_msa_upper	16, 0x80, a0
	save_msa_upper	17, 0x88, a0
	save_msa_upper	18, 0x90, a0
	save_msa_upper	19, 0x98, a0
	save_msa_upper	20, 0xa0, a0
	save_msa_upper	21, 0xa8, a0
	save_msa_upper	22, 0xb0, a0
	save_msa_upper	23, 0xb8, a0
	save_msa_upper	24, 0xc0, a0
	save_msa_upper	25, 0xc8, a0
	save_msa_upper	26, 0xd0, a0
	save_msa_upper	27, 0xd8, a0
	save_msa_upper	28, 0xe0, a0
	save_msa_upper	29, 0xe8, a0
	save_msa_upper	30, 0xf0, a0
	save_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0
	END(_save_msa_all_upper)
299
/*
 * restore_msa_upper - load the upper 64 bits of MSA vector register
 * \wr from \off(\base), staging through $1 (hence .set noat).  On
 * 32-bit kernels two 32-bit words are read in memory byte order and
 * inserted as elements 2 and 3.
 */
	.macro	restore_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	EX ld	$1, \off(\base)
	insert_d \wr, 1
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	$1, \off(\base)
	insert_w \wr, 2
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 3
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 2
	EX lw	$1, \off(\base)
	insert_w \wr, 3
#endif
	.set	pop
	.endm
319
/*
 * _restore_msa_all_upper() - load the upper 64 bits of all 32 MSA
 * vector registers from the buffer at a0 (8 bytes per register).
 * Returns 0 in v0 on success; a faulting access returns -EFAULT via
 * the __ex_table entries emitted by EX.
 */
LEAF(_restore_msa_all_upper)
	restore_msa_upper	0, 0x00, a0
	restore_msa_upper	1, 0x08, a0
	restore_msa_upper	2, 0x10, a0
	restore_msa_upper	3, 0x18, a0
	restore_msa_upper	4, 0x20, a0
	restore_msa_upper	5, 0x28, a0
	restore_msa_upper	6, 0x30, a0
	restore_msa_upper	7, 0x38, a0
	restore_msa_upper	8, 0x40, a0
	restore_msa_upper	9, 0x48, a0
	restore_msa_upper	10, 0x50, a0
	restore_msa_upper	11, 0x58, a0
	restore_msa_upper	12, 0x60, a0
	restore_msa_upper	13, 0x68, a0
	restore_msa_upper	14, 0x70, a0
	restore_msa_upper	15, 0x78, a0
	restore_msa_upper	16, 0x80, a0
	restore_msa_upper	17, 0x88, a0
	restore_msa_upper	18, 0x90, a0
	restore_msa_upper	19, 0x98, a0
	restore_msa_upper	20, 0xa0, a0
	restore_msa_upper	21, 0xa8, a0
	restore_msa_upper	22, 0xb0, a0
	restore_msa_upper	23, 0xb8, a0
	restore_msa_upper	24, 0xc0, a0
	restore_msa_upper	25, 0xc8, a0
	restore_msa_upper	26, 0xd0, a0
	restore_msa_upper	27, 0xd8, a0
	restore_msa_upper	28, 0xe0, a0
	restore_msa_upper	29, 0xe8, a0
	restore_msa_upper	30, 0xf0, a0
	restore_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0
	END(_restore_msa_all_upper)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
356
#endif /* CONFIG_CPU_HAS_MSA */

	.set	reorder

/*
 * fault - common __ex_table landing pad for all EX accesses above:
 * report failure by returning -EFAULT in v0.
 */
	.type	fault, @function
	.ent	fault
fault:	li	v0, -EFAULT				# failure
	jr	ra
	.end	fault
/* v3.15 */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
 15#include <asm/asm.h>
 16#include <asm/asmmacro.h>
 17#include <asm/errno.h>
 18#include <asm/fpregdef.h>
 19#include <asm/mipsregs.h>
 20#include <asm/asm-offsets.h>
 21#include <asm/regdef.h>
 22
 
 
 
 23	.macro	EX insn, reg, src
 24	.set	push
 
 25	.set	nomacro
 26.ex\@:	\insn	\reg, \src
 27	.set	pop
 28	.section __ex_table,"a"
 29	PTR	.ex\@, fault
 30	.previous
 31	.endm
 32
 33	.set	noreorder
 34	.set	arch=r4000
 35
 
 
 
 
 
 
 
 
 36LEAF(_save_fp_context)
 
 
 37	cfc1	t1, fcr31
 
 38
 39#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
 
 40	.set	push
 
 41#ifdef CONFIG_CPU_MIPS32_R2
 42	.set	mips64r2
 
 43	mfc0	t0, CP0_STATUS
 44	sll	t0, t0, 5
 45	bgez	t0, 1f			# skip storing odd if FR=0
 46	 nop
 47#endif
 48	/* Store the 16 odd double precision registers */
 49	EX	sdc1 $f1, SC_FPREGS+8(a0)
 50	EX	sdc1 $f3, SC_FPREGS+24(a0)
 51	EX	sdc1 $f5, SC_FPREGS+40(a0)
 52	EX	sdc1 $f7, SC_FPREGS+56(a0)
 53	EX	sdc1 $f9, SC_FPREGS+72(a0)
 54	EX	sdc1 $f11, SC_FPREGS+88(a0)
 55	EX	sdc1 $f13, SC_FPREGS+104(a0)
 56	EX	sdc1 $f15, SC_FPREGS+120(a0)
 57	EX	sdc1 $f17, SC_FPREGS+136(a0)
 58	EX	sdc1 $f19, SC_FPREGS+152(a0)
 59	EX	sdc1 $f21, SC_FPREGS+168(a0)
 60	EX	sdc1 $f23, SC_FPREGS+184(a0)
 61	EX	sdc1 $f25, SC_FPREGS+200(a0)
 62	EX	sdc1 $f27, SC_FPREGS+216(a0)
 63	EX	sdc1 $f29, SC_FPREGS+232(a0)
 64	EX	sdc1 $f31, SC_FPREGS+248(a0)
 651:	.set	pop
 66#endif
 67
 
 
 68	/* Store the 16 even double precision registers */
 69	EX	sdc1 $f0, SC_FPREGS+0(a0)
 70	EX	sdc1 $f2, SC_FPREGS+16(a0)
 71	EX	sdc1 $f4, SC_FPREGS+32(a0)
 72	EX	sdc1 $f6, SC_FPREGS+48(a0)
 73	EX	sdc1 $f8, SC_FPREGS+64(a0)
 74	EX	sdc1 $f10, SC_FPREGS+80(a0)
 75	EX	sdc1 $f12, SC_FPREGS+96(a0)
 76	EX	sdc1 $f14, SC_FPREGS+112(a0)
 77	EX	sdc1 $f16, SC_FPREGS+128(a0)
 78	EX	sdc1 $f18, SC_FPREGS+144(a0)
 79	EX	sdc1 $f20, SC_FPREGS+160(a0)
 80	EX	sdc1 $f22, SC_FPREGS+176(a0)
 81	EX	sdc1 $f24, SC_FPREGS+192(a0)
 82	EX	sdc1 $f26, SC_FPREGS+208(a0)
 83	EX	sdc1 $f28, SC_FPREGS+224(a0)
 84	EX	sdc1 $f30, SC_FPREGS+240(a0)
 85	EX	sw t1, SC_FPC_CSR(a0)
 86	jr	ra
 87	 li	v0, 0					# success
 
 88	END(_save_fp_context)
 89
 90#ifdef CONFIG_MIPS32_COMPAT
 91	/* Save 32-bit process floating point context */
 92LEAF(_save_fp_context32)
 93	cfc1	t1, fcr31
 94
 95	mfc0	t0, CP0_STATUS
 96	sll	t0, t0, 5
 97	bgez	t0, 1f			# skip storing odd if FR=0
 98	 nop
 99
100	/* Store the 16 odd double precision registers */
101	EX      sdc1 $f1, SC32_FPREGS+8(a0)
102	EX      sdc1 $f3, SC32_FPREGS+24(a0)
103	EX      sdc1 $f5, SC32_FPREGS+40(a0)
104	EX      sdc1 $f7, SC32_FPREGS+56(a0)
105	EX      sdc1 $f9, SC32_FPREGS+72(a0)
106	EX      sdc1 $f11, SC32_FPREGS+88(a0)
107	EX      sdc1 $f13, SC32_FPREGS+104(a0)
108	EX      sdc1 $f15, SC32_FPREGS+120(a0)
109	EX      sdc1 $f17, SC32_FPREGS+136(a0)
110	EX      sdc1 $f19, SC32_FPREGS+152(a0)
111	EX      sdc1 $f21, SC32_FPREGS+168(a0)
112	EX      sdc1 $f23, SC32_FPREGS+184(a0)
113	EX      sdc1 $f25, SC32_FPREGS+200(a0)
114	EX      sdc1 $f27, SC32_FPREGS+216(a0)
115	EX      sdc1 $f29, SC32_FPREGS+232(a0)
116	EX      sdc1 $f31, SC32_FPREGS+248(a0)
117
118	/* Store the 16 even double precision registers */
1191:	EX	sdc1 $f0, SC32_FPREGS+0(a0)
120	EX	sdc1 $f2, SC32_FPREGS+16(a0)
121	EX	sdc1 $f4, SC32_FPREGS+32(a0)
122	EX	sdc1 $f6, SC32_FPREGS+48(a0)
123	EX	sdc1 $f8, SC32_FPREGS+64(a0)
124	EX	sdc1 $f10, SC32_FPREGS+80(a0)
125	EX	sdc1 $f12, SC32_FPREGS+96(a0)
126	EX	sdc1 $f14, SC32_FPREGS+112(a0)
127	EX	sdc1 $f16, SC32_FPREGS+128(a0)
128	EX	sdc1 $f18, SC32_FPREGS+144(a0)
129	EX	sdc1 $f20, SC32_FPREGS+160(a0)
130	EX	sdc1 $f22, SC32_FPREGS+176(a0)
131	EX	sdc1 $f24, SC32_FPREGS+192(a0)
132	EX	sdc1 $f26, SC32_FPREGS+208(a0)
133	EX	sdc1 $f28, SC32_FPREGS+224(a0)
134	EX	sdc1 $f30, SC32_FPREGS+240(a0)
135	EX	sw t1, SC32_FPC_CSR(a0)
136	cfc1	t0, $0				# implementation/version
137	EX	sw t0, SC32_FPC_EIR(a0)
138
139	jr	ra
140	 li	v0, 0					# success
141	END(_save_fp_context32)
142#endif
143
/*
 * Restore FPU state from the sigcontext at a0:
 *  - fp gp registers (SC_FPREGS)
 *  - cp1 status/control register (SC_FPC_CSR)
 * Returns 0 in v0 on success; faults return -EFAULT via __ex_table.
 */
LEAF(_restore_fp_context)
	EX	lw t1, SC_FPC_CSR(a0)		# t1 = saved FCSR value

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
	.set	push
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips64r2
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5			# move Status.FR to sign bit
	bgez	t0, 1f			# skip loading odd if FR=0
	 nop
#endif
	/* Load the 16 odd double precision registers */
	EX	ldc1 $f1, SC_FPREGS+8(a0)
	EX	ldc1 $f3, SC_FPREGS+24(a0)
	EX	ldc1 $f5, SC_FPREGS+40(a0)
	EX	ldc1 $f7, SC_FPREGS+56(a0)
	EX	ldc1 $f9, SC_FPREGS+72(a0)
	EX	ldc1 $f11, SC_FPREGS+88(a0)
	EX	ldc1 $f13, SC_FPREGS+104(a0)
	EX	ldc1 $f15, SC_FPREGS+120(a0)
	EX	ldc1 $f17, SC_FPREGS+136(a0)
	EX	ldc1 $f19, SC_FPREGS+152(a0)
	EX	ldc1 $f21, SC_FPREGS+168(a0)
	EX	ldc1 $f23, SC_FPREGS+184(a0)
	EX	ldc1 $f25, SC_FPREGS+200(a0)
	EX	ldc1 $f27, SC_FPREGS+216(a0)
	EX	ldc1 $f29, SC_FPREGS+232(a0)
	EX	ldc1 $f31, SC_FPREGS+248(a0)
1:	.set pop
#endif
	/* Load the 16 even double precision registers */
	EX	ldc1 $f0, SC_FPREGS+0(a0)
	EX	ldc1 $f2, SC_FPREGS+16(a0)
	EX	ldc1 $f4, SC_FPREGS+32(a0)
	EX	ldc1 $f6, SC_FPREGS+48(a0)
	EX	ldc1 $f8, SC_FPREGS+64(a0)
	EX	ldc1 $f10, SC_FPREGS+80(a0)
	EX	ldc1 $f12, SC_FPREGS+96(a0)
	EX	ldc1 $f14, SC_FPREGS+112(a0)
	EX	ldc1 $f16, SC_FPREGS+128(a0)
	EX	ldc1 $f18, SC_FPREGS+144(a0)
	EX	ldc1 $f20, SC_FPREGS+160(a0)
	EX	ldc1 $f22, SC_FPREGS+176(a0)
	EX	ldc1 $f24, SC_FPREGS+192(a0)
	EX	ldc1 $f26, SC_FPREGS+208(a0)
	EX	ldc1 $f28, SC_FPREGS+224(a0)
	EX	ldc1 $f30, SC_FPREGS+240(a0)
	ctc1	t1, fcr31			# restore FCSR
	jr	ra
	 li	v0, 0					# success
	END(_restore_fp_context)
199
#ifdef CONFIG_MIPS32_COMPAT
LEAF(_restore_fp_context32)
	/* Restore an o32 sigcontext (a0).  Returns 0 in v0 on success;
	 * faults return -EFAULT via __ex_table. */
	EX	lw t1, SC32_FPC_CSR(a0)		# t1 = saved FCSR value

	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5			# move Status.FR to sign bit
	bgez	t0, 1f			# skip loading odd if FR=0
	 nop

	/* Load the 16 odd double precision registers */
	EX	ldc1 $f1, SC32_FPREGS+8(a0)
	EX	ldc1 $f3, SC32_FPREGS+24(a0)
	EX	ldc1 $f5, SC32_FPREGS+40(a0)
	EX	ldc1 $f7, SC32_FPREGS+56(a0)
	EX	ldc1 $f9, SC32_FPREGS+72(a0)
	EX	ldc1 $f11, SC32_FPREGS+88(a0)
	EX	ldc1 $f13, SC32_FPREGS+104(a0)
	EX	ldc1 $f15, SC32_FPREGS+120(a0)
	EX	ldc1 $f17, SC32_FPREGS+136(a0)
	EX	ldc1 $f19, SC32_FPREGS+152(a0)
	EX	ldc1 $f21, SC32_FPREGS+168(a0)
	EX	ldc1 $f23, SC32_FPREGS+184(a0)
	EX	ldc1 $f25, SC32_FPREGS+200(a0)
	EX	ldc1 $f27, SC32_FPREGS+216(a0)
	EX	ldc1 $f29, SC32_FPREGS+232(a0)
	EX	ldc1 $f31, SC32_FPREGS+248(a0)

	/* Load the 16 even double precision registers */
1:	EX	ldc1 $f0, SC32_FPREGS+0(a0)
	EX	ldc1 $f2, SC32_FPREGS+16(a0)
	EX	ldc1 $f4, SC32_FPREGS+32(a0)
	EX	ldc1 $f6, SC32_FPREGS+48(a0)
	EX	ldc1 $f8, SC32_FPREGS+64(a0)
	EX	ldc1 $f10, SC32_FPREGS+80(a0)
	EX	ldc1 $f12, SC32_FPREGS+96(a0)
	EX	ldc1 $f14, SC32_FPREGS+112(a0)
	EX	ldc1 $f16, SC32_FPREGS+128(a0)
	EX	ldc1 $f18, SC32_FPREGS+144(a0)
	EX	ldc1 $f20, SC32_FPREGS+160(a0)
	EX	ldc1 $f22, SC32_FPREGS+176(a0)
	EX	ldc1 $f24, SC32_FPREGS+192(a0)
	EX	ldc1 $f26, SC32_FPREGS+208(a0)
	EX	ldc1 $f28, SC32_FPREGS+224(a0)
	EX	ldc1 $f30, SC32_FPREGS+240(a0)
	ctc1	t1, fcr31			# restore FCSR
	jr	ra
	 li	v0, 0					# success
	END(_restore_fp_context32)
#endif
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
248
#ifdef CONFIG_CPU_HAS_MSA

/*
 * save_sc_msareg - store the upper 64 bits of MSA vector register
 * \wr to \sc + \off + (\wr * 8), using \tmp as scratch.  On 32-bit
 * kernels the value is moved out as two 32-bit words, written in
 * memory byte order.
 */
	.macro	save_sc_msareg	wr, off, sc, tmp
#ifdef CONFIG_64BIT
	copy_u_d \tmp, \wr, 1
	EX sd	\tmp, (\off+(\wr*8))(\sc)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	copy_u_w \tmp, \wr, 2
	EX sw	\tmp, (\off+(\wr*8)+0)(\sc)
	copy_u_w \tmp, \wr, 3
	EX sw	\tmp, (\off+(\wr*8)+4)(\sc)
#else /* CONFIG_CPU_BIG_ENDIAN */
	copy_u_w \tmp, \wr, 2
	EX sw	\tmp, (\off+(\wr*8)+4)(\sc)
	copy_u_w \tmp, \wr, 3
	EX sw	\tmp, (\off+(\wr*8)+0)(\sc)
#endif
	.endm
267
/*
 * int _save_msa_context(struct sigcontext *sc)
 *
 * Save the upper 64 bits of each vector register along with the MSA_CSR
 * register into sc. Returns zero on success, else non-zero.
 *
 * NOTE(review): only the vector registers are stored by the code
 * below; MSA_CSR is not written here — confirm against callers.
 */
LEAF(_save_msa_context)
	save_sc_msareg	0, SC_MSAREGS, a0, t0
	save_sc_msareg	1, SC_MSAREGS, a0, t0
	save_sc_msareg	2, SC_MSAREGS, a0, t0
	save_sc_msareg	3, SC_MSAREGS, a0, t0
	save_sc_msareg	4, SC_MSAREGS, a0, t0
	save_sc_msareg	5, SC_MSAREGS, a0, t0
	save_sc_msareg	6, SC_MSAREGS, a0, t0
	save_sc_msareg	7, SC_MSAREGS, a0, t0
	save_sc_msareg	8, SC_MSAREGS, a0, t0
	save_sc_msareg	9, SC_MSAREGS, a0, t0
	save_sc_msareg	10, SC_MSAREGS, a0, t0
	save_sc_msareg	11, SC_MSAREGS, a0, t0
	save_sc_msareg	12, SC_MSAREGS, a0, t0
	save_sc_msareg	13, SC_MSAREGS, a0, t0
	save_sc_msareg	14, SC_MSAREGS, a0, t0
	save_sc_msareg	15, SC_MSAREGS, a0, t0
	save_sc_msareg	16, SC_MSAREGS, a0, t0
	save_sc_msareg	17, SC_MSAREGS, a0, t0
	save_sc_msareg	18, SC_MSAREGS, a0, t0
	save_sc_msareg	19, SC_MSAREGS, a0, t0
	save_sc_msareg	20, SC_MSAREGS, a0, t0
	save_sc_msareg	21, SC_MSAREGS, a0, t0
	save_sc_msareg	22, SC_MSAREGS, a0, t0
	save_sc_msareg	23, SC_MSAREGS, a0, t0
	save_sc_msareg	24, SC_MSAREGS, a0, t0
	save_sc_msareg	25, SC_MSAREGS, a0, t0
	save_sc_msareg	26, SC_MSAREGS, a0, t0
	save_sc_msareg	27, SC_MSAREGS, a0, t0
	save_sc_msareg	28, SC_MSAREGS, a0, t0
	save_sc_msareg	29, SC_MSAREGS, a0, t0
	save_sc_msareg	30, SC_MSAREGS, a0, t0
	save_sc_msareg	31, SC_MSAREGS, a0, t0
	jr	ra
	 li	v0, 0
	END(_save_msa_context)
310
#ifdef CONFIG_MIPS32_COMPAT

/*
 * int _save_msa_context32(struct sigcontext32 *sc)
 *
 * Save the upper 64 bits of each vector register along with the MSA_CSR
 * register into sc. Returns zero on success, else non-zero.
 *
 * NOTE(review): only the vector registers are stored by the code
 * below; MSA_CSR is not written here — confirm against callers.
 */
LEAF(_save_msa_context32)
	save_sc_msareg	0, SC32_MSAREGS, a0, t0
	save_sc_msareg	1, SC32_MSAREGS, a0, t0
	save_sc_msareg	2, SC32_MSAREGS, a0, t0
	save_sc_msareg	3, SC32_MSAREGS, a0, t0
	save_sc_msareg	4, SC32_MSAREGS, a0, t0
	save_sc_msareg	5, SC32_MSAREGS, a0, t0
	save_sc_msareg	6, SC32_MSAREGS, a0, t0
	save_sc_msareg	7, SC32_MSAREGS, a0, t0
	save_sc_msareg	8, SC32_MSAREGS, a0, t0
	save_sc_msareg	9, SC32_MSAREGS, a0, t0
	save_sc_msareg	10, SC32_MSAREGS, a0, t0
	save_sc_msareg	11, SC32_MSAREGS, a0, t0
	save_sc_msareg	12, SC32_MSAREGS, a0, t0
	save_sc_msareg	13, SC32_MSAREGS, a0, t0
	save_sc_msareg	14, SC32_MSAREGS, a0, t0
	save_sc_msareg	15, SC32_MSAREGS, a0, t0
	save_sc_msareg	16, SC32_MSAREGS, a0, t0
	save_sc_msareg	17, SC32_MSAREGS, a0, t0
	save_sc_msareg	18, SC32_MSAREGS, a0, t0
	save_sc_msareg	19, SC32_MSAREGS, a0, t0
	save_sc_msareg	20, SC32_MSAREGS, a0, t0
	save_sc_msareg	21, SC32_MSAREGS, a0, t0
	save_sc_msareg	22, SC32_MSAREGS, a0, t0
	save_sc_msareg	23, SC32_MSAREGS, a0, t0
	save_sc_msareg	24, SC32_MSAREGS, a0, t0
	save_sc_msareg	25, SC32_MSAREGS, a0, t0
	save_sc_msareg	26, SC32_MSAREGS, a0, t0
	save_sc_msareg	27, SC32_MSAREGS, a0, t0
	save_sc_msareg	28, SC32_MSAREGS, a0, t0
	save_sc_msareg	29, SC32_MSAREGS, a0, t0
	save_sc_msareg	30, SC32_MSAREGS, a0, t0
	save_sc_msareg	31, SC32_MSAREGS, a0, t0
	jr	ra
	 li	v0, 0
	END(_save_msa_context32)

#endif /* CONFIG_MIPS32_COMPAT */
357
/*
 * restore_sc_msareg - load the upper 64 bits of MSA vector register
 * \wr from \sc + \off + (\wr * 8), using \tmp as scratch.  On 32-bit
 * kernels two 32-bit words are read in memory byte order and
 * inserted as elements 2 and 3.
 */
	.macro restore_sc_msareg	wr, off, sc, tmp
#ifdef CONFIG_64BIT
	EX ld	\tmp, (\off+(\wr*8))(\sc)
	insert_d \wr, 1, \tmp
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	\tmp, (\off+(\wr*8)+0)(\sc)
	insert_w \wr, 2, \tmp
	EX lw	\tmp, (\off+(\wr*8)+4)(\sc)
	insert_w \wr, 3, \tmp
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	\tmp, (\off+(\wr*8)+4)(\sc)
	insert_w \wr, 2, \tmp
	EX lw	\tmp, (\off+(\wr*8)+0)(\sc)
	insert_w \wr, 3, \tmp
#endif
	.endm
374
/*
 * int _restore_msa_context(struct sigcontext *sc)
 *
 * Restore the upper 64 bits of each vector register from sc.
 * Returns 0 in v0 on success; faults return -EFAULT via __ex_table.
 */
LEAF(_restore_msa_context)
	restore_sc_msareg	0, SC_MSAREGS, a0, t0
	restore_sc_msareg	1, SC_MSAREGS, a0, t0
	restore_sc_msareg	2, SC_MSAREGS, a0, t0
	restore_sc_msareg	3, SC_MSAREGS, a0, t0
	restore_sc_msareg	4, SC_MSAREGS, a0, t0
	restore_sc_msareg	5, SC_MSAREGS, a0, t0
	restore_sc_msareg	6, SC_MSAREGS, a0, t0
	restore_sc_msareg	7, SC_MSAREGS, a0, t0
	restore_sc_msareg	8, SC_MSAREGS, a0, t0
	restore_sc_msareg	9, SC_MSAREGS, a0, t0
	restore_sc_msareg	10, SC_MSAREGS, a0, t0
	restore_sc_msareg	11, SC_MSAREGS, a0, t0
	restore_sc_msareg	12, SC_MSAREGS, a0, t0
	restore_sc_msareg	13, SC_MSAREGS, a0, t0
	restore_sc_msareg	14, SC_MSAREGS, a0, t0
	restore_sc_msareg	15, SC_MSAREGS, a0, t0
	restore_sc_msareg	16, SC_MSAREGS, a0, t0
	restore_sc_msareg	17, SC_MSAREGS, a0, t0
	restore_sc_msareg	18, SC_MSAREGS, a0, t0
	restore_sc_msareg	19, SC_MSAREGS, a0, t0
	restore_sc_msareg	20, SC_MSAREGS, a0, t0
	restore_sc_msareg	21, SC_MSAREGS, a0, t0
	restore_sc_msareg	22, SC_MSAREGS, a0, t0
	restore_sc_msareg	23, SC_MSAREGS, a0, t0
	restore_sc_msareg	24, SC_MSAREGS, a0, t0
	restore_sc_msareg	25, SC_MSAREGS, a0, t0
	restore_sc_msareg	26, SC_MSAREGS, a0, t0
	restore_sc_msareg	27, SC_MSAREGS, a0, t0
	restore_sc_msareg	28, SC_MSAREGS, a0, t0
	restore_sc_msareg	29, SC_MSAREGS, a0, t0
	restore_sc_msareg	30, SC_MSAREGS, a0, t0
	restore_sc_msareg	31, SC_MSAREGS, a0, t0
	jr	ra
	 li	v0, 0
	END(_restore_msa_context)
414
#ifdef CONFIG_MIPS32_COMPAT

/*
 * int _restore_msa_context32(struct sigcontext32 *sc)
 *
 * Restore the upper 64 bits of each vector register from sc.
 * Returns 0 in v0 on success; faults return -EFAULT via __ex_table.
 */
LEAF(_restore_msa_context32)
	restore_sc_msareg	0, SC32_MSAREGS, a0, t0
	restore_sc_msareg	1, SC32_MSAREGS, a0, t0
	restore_sc_msareg	2, SC32_MSAREGS, a0, t0
	restore_sc_msareg	3, SC32_MSAREGS, a0, t0
	restore_sc_msareg	4, SC32_MSAREGS, a0, t0
	restore_sc_msareg	5, SC32_MSAREGS, a0, t0
	restore_sc_msareg	6, SC32_MSAREGS, a0, t0
	restore_sc_msareg	7, SC32_MSAREGS, a0, t0
	restore_sc_msareg	8, SC32_MSAREGS, a0, t0
	restore_sc_msareg	9, SC32_MSAREGS, a0, t0
	restore_sc_msareg	10, SC32_MSAREGS, a0, t0
	restore_sc_msareg	11, SC32_MSAREGS, a0, t0
	restore_sc_msareg	12, SC32_MSAREGS, a0, t0
	restore_sc_msareg	13, SC32_MSAREGS, a0, t0
	restore_sc_msareg	14, SC32_MSAREGS, a0, t0
	restore_sc_msareg	15, SC32_MSAREGS, a0, t0
	restore_sc_msareg	16, SC32_MSAREGS, a0, t0
	restore_sc_msareg	17, SC32_MSAREGS, a0, t0
	restore_sc_msareg	18, SC32_MSAREGS, a0, t0
	restore_sc_msareg	19, SC32_MSAREGS, a0, t0
	restore_sc_msareg	20, SC32_MSAREGS, a0, t0
	restore_sc_msareg	21, SC32_MSAREGS, a0, t0
	restore_sc_msareg	22, SC32_MSAREGS, a0, t0
	restore_sc_msareg	23, SC32_MSAREGS, a0, t0
	restore_sc_msareg	24, SC32_MSAREGS, a0, t0
	restore_sc_msareg	25, SC32_MSAREGS, a0, t0
	restore_sc_msareg	26, SC32_MSAREGS, a0, t0
	restore_sc_msareg	27, SC32_MSAREGS, a0, t0
	restore_sc_msareg	28, SC32_MSAREGS, a0, t0
	restore_sc_msareg	29, SC32_MSAREGS, a0, t0
	restore_sc_msareg	30, SC32_MSAREGS, a0, t0
	restore_sc_msareg	31, SC32_MSAREGS, a0, t0
	jr	ra
	 li	v0, 0
	END(_restore_msa_context32)

#endif /* CONFIG_MIPS32_COMPAT */
458
#endif /* CONFIG_CPU_HAS_MSA */

	.set	reorder

/*
 * fault - common __ex_table landing pad for all EX accesses above:
 * report failure by returning -EFAULT in v0.
 * Fix: original read ".type fault@function" — the GAS .type
 * directive requires a comma between symbol and type.
 */
	.type	fault, @function
	.ent	fault
fault:	li	v0, -EFAULT				# failure
	jr	ra
	.end	fault