Linux kernel header: arch/mips/include/asm/msa.h (MIPS SIMD Architecture support).
Note: this file does not exist in kernel v3.1; it was introduced later.
  1/*
  2 * Copyright (C) 2013 Imagination Technologies
  3 * Author: Paul Burton <paul.burton@imgtec.com>
  4 *
  5 * This program is free software; you can redistribute it and/or modify it
  6 * under the terms of the GNU General Public License as published by the
  7 * Free Software Foundation;  either version 2 of the  License, or (at your
  8 * option) any later version.
  9 */
 10#ifndef _ASM_MSA_H
 11#define _ASM_MSA_H
 12
 13#include <asm/mipsregs.h>
 14
 15#ifndef __ASSEMBLY__
 16
 17#include <asm/inst.h>
 18
/* Low-level MSA context helpers; definitions live outside this header. */
extern void _save_msa(struct task_struct *);
extern void _restore_msa(struct task_struct *);
extern void _init_msa_upper(void);

/* Per-format readers of a single MSA vector register (byte/half/word/dword). */
extern void read_msa_wr_b(unsigned idx, union fpureg *to);
extern void read_msa_wr_h(unsigned idx, union fpureg *to);
extern void read_msa_wr_w(unsigned idx, union fpureg *to);
extern void read_msa_wr_d(unsigned idx, union fpureg *to);
 27
 28/**
 29 * read_msa_wr() - Read a single MSA vector register
 30 * @idx:	The index of the vector register to read
 31 * @to:		The FPU register union to store the registers value in
 32 * @fmt:	The format of the data in the vector register
 33 *
 34 * Read the value of MSA vector register idx into the FPU register
 35 * union to, using the format fmt.
 36 */
 37static inline void read_msa_wr(unsigned idx, union fpureg *to,
 38			       enum msa_2b_fmt fmt)
 39{
 40	switch (fmt) {
 41	case msa_fmt_b:
 42		read_msa_wr_b(idx, to);
 43		break;
 44
 45	case msa_fmt_h:
 46		read_msa_wr_h(idx, to);
 47		break;
 48
 49	case msa_fmt_w:
 50		read_msa_wr_w(idx, to);
 51		break;
 52
 53	case msa_fmt_d:
 54		read_msa_wr_d(idx, to);
 55		break;
 56
 57	default:
 58		BUG();
 59	}
 60}
 61
/* Per-format writers of a single MSA vector register (byte/half/word/dword). */
extern void write_msa_wr_b(unsigned idx, union fpureg *from);
extern void write_msa_wr_h(unsigned idx, union fpureg *from);
extern void write_msa_wr_w(unsigned idx, union fpureg *from);
extern void write_msa_wr_d(unsigned idx, union fpureg *from);
 66
 67/**
 68 * write_msa_wr() - Write a single MSA vector register
 69 * @idx:	The index of the vector register to write
 70 * @from:	The FPU register union to take the registers value from
 71 * @fmt:	The format of the data in the vector register
 72 *
 73 * Write the value from the FPU register union from into MSA vector
 74 * register idx, using the format fmt.
 75 */
 76static inline void write_msa_wr(unsigned idx, union fpureg *from,
 77				enum msa_2b_fmt fmt)
 78{
 79	switch (fmt) {
 80	case msa_fmt_b:
 81		write_msa_wr_b(idx, from);
 82		break;
 83
 84	case msa_fmt_h:
 85		write_msa_wr_h(idx, from);
 86		break;
 87
 88	case msa_fmt_w:
 89		write_msa_wr_w(idx, from);
 90		break;
 91
 92	case msa_fmt_d:
 93		write_msa_wr_d(idx, from);
 94		break;
 95
 96	default:
 97		BUG();
 98	}
 99}
100
101static inline void enable_msa(void)
102{
103	if (cpu_has_msa) {
104		set_c0_config5(MIPS_CONF5_MSAEN);
105		enable_fpu_hazard();
106	}
107}
108
109static inline void disable_msa(void)
110{
111	if (cpu_has_msa) {
112		clear_c0_config5(MIPS_CONF5_MSAEN);
113		disable_fpu_hazard();
114	}
115}
116
117static inline int is_msa_enabled(void)
118{
119	if (!cpu_has_msa)
120		return 0;
121
122	return read_c0_config5() & MIPS_CONF5_MSAEN;
123}
124
/*
 * thread_msa_context_live() - non-zero iff the current thread has live
 * MSA context (TIF_MSA_CTX_LIVE set).
 */
static inline int thread_msa_context_live(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return 0;

	return test_thread_flag(TIF_MSA_CTX_LIVE);
}
137
138static inline void save_msa(struct task_struct *t)
139{
140	if (cpu_has_msa)
141		_save_msa(t);
142}
143
144static inline void restore_msa(struct task_struct *t)
145{
146	if (cpu_has_msa)
147		_restore_msa(t);
148}
149
/*
 * init_msa_upper() - initialise the upper halves of the vector registers
 * via _init_msa_upper(); no-op on CPUs known at build time to lack MSA.
 */
static inline void init_msa_upper(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return;

	_init_msa_upper();
}
162
#ifdef TOOLCHAIN_SUPPORTS_MSA

/*
 * __BUILD_MSA_CTL_REG(name, cs) - emit read_msa_<name>() and
 * write_msa_<name>() accessors for MSA control register $<cs>,
 * using the cfcmsa/ctcmsa instructions directly. The .set fp=64/msa
 * directives make the assembler accept the MSA instructions.
 */
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	fp=64\n"				\
	"	.set	msa\n"					\
	"	cfcmsa	%0, $" #cs "\n"				\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	fp=64\n"				\
	"	.set	msa\n"					\
	"	ctcmsa	$" #cs ", %0\n"				\
	"	.set	pop\n"					\
	: : "r"(val));						\
}

#else /* !TOOLCHAIN_SUPPORTS_MSA */

/*
 * Define functions using .word for the c[ft]cmsa instructions in order to
 * allow compilation with toolchains that do not support MSA. Once all
 * toolchains in use support MSA these can be removed.
 */

/*
 * Same accessors as above, but the cfcmsa/ctcmsa instructions are
 * hand-encoded via _ASM_INSN_IF_MIPS/_ASM_INSN32_IF_MM with the control
 * register index (cs) baked into the encoding; $1 (AT, hence .set noat)
 * is used as a scratch register to move the value in/out.
 */
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	# cfcmsa $1, $%1\n"				\
	_ASM_INSN_IF_MIPS(0x787e0059 | %1 << 11)		\
	_ASM_INSN32_IF_MM(0x587e0056 | %1 << 11)		\
	"	move	%0, $1\n"				\
	"	.set	pop\n"					\
	: "=r"(reg) : "i"(cs));					\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	move	$1, %0\n"				\
	"	# ctcmsa $%1, $1\n"				\
	_ASM_INSN_IF_MIPS(0x783e0819 | %1 << 6)			\
	_ASM_INSN32_IF_MM(0x583e0816 | %1 << 6)			\
	"	.set	pop\n"					\
	: : "r"(val), "i"(cs));					\
}

#endif /* !TOOLCHAIN_SUPPORTS_MSA */
228
/*
 * Instantiate accessors for each MSA control register, e.g.
 * read_msa_csr()/write_msa_csr() for MSACSR ($1).
 */
__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)
237
238#endif /* !__ASSEMBLY__ */
239
/*
 * MSA control register indices, matching the cs arguments passed to
 * __BUILD_MSA_CTL_REG above. Usable from assembly as well (outside
 * the !__ASSEMBLY__ guard).
 */
#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

/* MSA Implementation Register (MSAIR) */
/* Fields follow the *B = bit position, *F = field mask convention. */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
/* Rounding mode values for the RM field */
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
/* Flags: sticky exception flags (I/U/O/Z/V sub-bits follow) */
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
/* Enables: exception enable bits (I/U/O/Z/V sub-bits follow) */
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
/* Cause: exception cause bits (I/U/O/Z/V/E sub-bits follow) */
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)
306
307#endif /* _ASM_MSA_H */