Linux v4.17: arch/mips/lib/memset.S
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif
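
/*
 * swl/swr (sdl/sdr on 64-bit kernels) store the most/least significant
 * bytes of a register at an unaligned address, so a single store can
 * fill up to a natural boundary without a byte loop.  MIPS R6 removed
 * these instructions, hence the separate byte-store paths further down.
 */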

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
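
/*
 * microMIPS has paired stores (LONG_S becomes LONG_SP, i.e. swp/sdp)
 * that write two registers per instruction, so each store covers
 * 2 * LONGSIZE bytes and the fill value is kept in the t8/t9 pair
 * rather than in a1 alone.
 */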

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous
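
/*
 * EX() wraps each store that may fault: the store gets a local label 9
 * and a (label, handler) pair is emitted into the __ex_table section.
 * On a faulting store the exception code looks the address up in that
 * table and resumes at the fixup handler instead of oopsing.
 */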

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
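
/*
 * Every f_fill64 expansion stores exactly 64 bytes: the #if ladder picks
 * 16, 8 or 4 stores so that count * STORSIZE == 64 for each
 * LONGSIZE/microMIPS combination.
 */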

	.set	noreorder
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro __BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset\@
	andi		t0, a0, STORMASK	/* aligned? */
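	/*
	 * The andi executes in the bnez delay slot (noreorder), so t0
	 * holds a0's misalignment whichever way the branch goes.
	 */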

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
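
	/*
	 * On the unaligned path t0 now holds misalignment - STORSIZE,
	 * i.e. minus the number of bytes needed to reach the next
	 * STORSIZE boundary.  With CONFIG_CPU_DADDI_WORKAROUNDS the
	 * constant is loaded into AT first so the subtraction uses the
	 * register form rather than a (potentially broken) immediate add.
	 */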

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz		t0, 0f;			\
	PTR_ADDU	t0, 1;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
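	/*
	 * R6 has no swl/swr, so above the unaligned head is filled one
	 * sb at a time, t0 counting up from misalignment - STORSIZE to
	 * zero.  The ori/xori pair clears the low address bits (round
	 * down) and the PTR_ADDIU steps to the next STORSIZE boundary.
	 */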
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial\@	/* no block to fill */
	andi		t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b
	.set		noreorder
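
	/*
	 * The ori/xori pair above rounded the length down to a multiple
	 * of 64 (the bytes covered by whole f_fill64 blocks), and t0
	 * keeps the full STORSIZE stores left over for the partial
	 * block.  The loop bumps a0 first and stores at offset -64, so
	 * the bne can compare directly against the end address in t1.
	 */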

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	PTR_ADDU	a0, t0			/* dest ptr */
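
	/*
	 * Computed jump into the tail of the f_fill64 expansion below:
	 * t1 is moved back from label 2 by just enough store
	 * instructions to cover the t0 remaining bytes, so only the
	 * stores that are needed execute (same idea as Duff's device).
	 * The shifts scale the byte count to the instruction offset.
	 */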

	.set		push
	.set		noreorder
	.set		nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set		pop
	andi		a2, STORMASK		/* At most one long to go */

	beqz		a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	PTR_SUBU	t0, $0, a2
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
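	/*
	 * Pre-R6 the final partial long is written with the mirror of
	 * the head store (swr on big-endian, swl on little-endian)
	 * ending at the last byte; R6 again stores byte-by-byte, with
	 * t0 preset to count the trailing bytes up to zero.
	 */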
1:	jr		ra
	move		a2, zero

.Lsmall_memset\@:
	beqz		a2, 2f
	PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)

2:	jr		ra			/* done */
	move		a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_MIPSR6
.Lbyte_fixup\@:
	PTR_SUBU	a2, $0, t0
	jr		ra
	 PTR_ADDIU	a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
	jr	ra
	nop

.Lfwd_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	LONG_SUBU	a2, t0

.Lpartial_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	jr		ra
	LONG_SUBU	a2, t0
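
	/*
	 * The fault fixups leave the number of bytes that were NOT set
	 * in a2: the faulting address is fetched from thread.buaddr and
	 * subtracted from the intended end of the region (t1 or a0,
	 * plus whatever tail a2 still accounted for).
	 */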

.Llast_fixup\@:
	jr		ra
	 nop

.Lsmall_fixup\@:
	PTR_SUBU	a2, t1, a0
	jr		ra
	 PTR_ADDIU	a2, 1

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
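	/*
	 * A zero fill value can skip the splat below; otherwise the low
	 * byte of a1 is replicated across the whole register with
	 * successive shift-and-or steps (8, 16 and, on 64-bit, 32).
	 */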
	beqz		a1, 1f
	move		v0, a0			/* result */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL		t1, a1, 8
	or		a1, t1
	LONG_SLL		t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL		t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#else
FEXPORT(__bzero_kernel)
EXPORT_SYMBOL(__bzero_kernel)
#endif
	__BUILD_BZERO LEGACY_MODE
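
/*
 * With CONFIG_EVA the legacy expansion above only serves kernel
 * addresses (as __bzero_kernel), and a second expansion below emits the
 * EVA variants of the stores (sbe/swe/...) for the user-space __bzero.
 */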

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif