v3.5.6
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007  Maciej W. Rozycki
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#define EX(insn,reg,addr,handler)			\
9:	insn	reg, addr;				\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup
	EX(LONG_S, \val, (\offset +  0 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  4 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * LONGSIZE)(\dst), \fixup)
#if LONGSIZE == 4
	EX(LONG_S, \val, (\offset +  8 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * LONGSIZE)(\dst), \fixup)
#endif
	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */
	.set	noreorder
	.align	5
LEAF(memset)
	beqz		a1, 1f
	 move		v0, a0			/* result */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:

FEXPORT(__bzero)
	sltiu		t0, a2, LONGSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset
	 andi		t0, a0, LONGMASK	/* aligned? */

#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, LONGSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, LONGSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup)	/* make word/dword aligned */
#endif
#ifdef __MIPSEL__
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial	/* no block to fill */
	 andi		t0, a2, 0x40-LONGSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, a1, .Lfwd_fixup
	bne		t1, a0, 1b
	.set		noreorder

.Lmemset_partial:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#if LONGSIZE == 4
	PTR_SUBU	t1, t0
#else
	.set		noat
	LONG_SRL	AT, t0, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	 PTR_ADDU	a0, t0			/* dest ptr */

	.set		push
	.set		noreorder
	.set		nomacro
	f_fill64 a0, -64, a1, .Lpartial_fixup	/* ... but first do longs ... */
2:	.set		pop
	andi		a2, LONGMASK		/* At most one long to go */

	beqz		a2, 1f
	 PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup)
#endif
#ifdef __MIPSEL__
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup)
#endif
1:	jr		ra
	 move		a2, zero

.Lsmall_memset:
	beqz		a2, 2f
	 PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	 sb		a1, -1(a0)

2:	jr		ra			/* done */
	 move		a2, zero
	END(memset)

.Lfirst_fixup:
	jr	ra
	 nop

.Lfwd_fixup:
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	 LONG_SUBU	a2, t0

.Lpartial_fixup:
	PTR_L		t0, TI_TASK($28)
	andi		a2, LONGMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	 LONG_SUBU	a2, t0

.Llast_fixup:
	jr		ra
	 andi		v1, a2, LONGMASK
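
The strategy above, in brief: spread the fill byte across a machine word, cover the misaligned head with a single unaligned store (swl/sdl on big endian, swr/sdr on little endian), fill 64-byte blocks via the unrolled f_fill64 macro, then finish the remainder. Every EX() store is also registered in __ex_table, so a fault while clearing user memory through __bzero lands in one of the .L*_fixup handlers, which report the number of bytes left unset in a2. The following is a minimal C sketch of the same flow, for readers less comfortable with MIPS assembly; memset_sketch is a hypothetical name and the byte loops stand in for the unaligned head/tail stores, so this is an illustrative analogue, not kernel code.

#include <stddef.h>
#include <stdint.h>

/* Hypothetical illustration; the real routine is the assembly above. */
void *memset_sketch(void *s, int c, size_t n)
{
	unsigned char *p = s;
	unsigned long fill = (unsigned char)c;

	/* Spread the fill byte across a long, as the andi/LONG_SLL/or
	 * sequence does: 0xab -> 0xabab -> 0xabababab (and once more
	 * on 64-bit). */
	fill |= fill << 8;
	fill |= fill << 16;
	if (sizeof(fill) == 8)
		fill |= fill << (sizeof(fill) * 4);	/* shift by 32 */

	/* Head: advance byte-by-byte until p is long-aligned; the asm
	 * instead covers the head with one unaligned swl/sdl or
	 * swr/sdr store. */
	while (n > 0 && ((uintptr_t)p & (sizeof(long) - 1)) != 0) {
		*p++ = (unsigned char)c;
		n--;
	}

	/* Body: one full long store per iteration; the asm unrolls
	 * this into 64-byte blocks via f_fill64. */
	while (n >= sizeof(long)) {
		*(unsigned long *)(void *)p = fill;
		p += sizeof(long);
		n -= sizeof(long);
	}

	/* Tail: at most LONGSIZE-1 bytes remain; the asm finishes with
	 * one unaligned store ending at the last byte (-1(a0) after a0
	 * has been advanced past the end). */
	while (n > 0) {
		*p++ = (unsigned char)c;
		n--;
	}

	return s;
}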
v3.15
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.set	noreorder
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro __BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset\@
	andi		t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
#ifdef __MIPSEL__
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial\@	/* no block to fill */
	andi		t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b
	.set		noreorder

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	PTR_ADDU	a0, t0			/* dest ptr */

	.set		push
	.set		noreorder
	.set		nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set		pop
	andi		a2, STORMASK		/* At most one long to go */

	beqz		a2, 1f
	PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#endif
#ifdef __MIPSEL__
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
1:	jr		ra
	move		a2, zero

.Lsmall_memset\@:
	beqz		a2, 2f
	PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	sb		a1, -1(a0)

2:	jr		ra			/* done */
	move		a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

.Lfirst_fixup\@:
	jr	ra
	nop

.Lfwd_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	LONG_SUBU	a2, t0

.Lpartial_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	LONG_SUBU	a2, t0

.Llast_fixup\@:
	jr		ra
	andi		v1, a2, STORMASK

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
	beqz		a1, 1f
	move		v0, a0			/* result */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
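
Compared with the v3.5.6 version, the body now lives in the __BUILD_BZERO macro so it can be assembled twice: once in LEGACY_MODE (memset and, without CONFIG_EVA, __bzero) and once in EVA_MODE, where EX() rewrites each store into its e-suffixed EVA form for user addresses; the \@ suffix keeps the labels unique per expansion. Both versions also share the computed-jump trick in .Lmemset_partial: PTR_LA t1, 2f and a subtraction pick an entry point partway into the unrolled f_fill64 block, so exactly the remaining number of long stores execute. In C this corresponds to the classic Duff's device; the sketch below is an illustrative analogue with a hypothetical helper name, not kernel code.

#include <stddef.h>

/* Hypothetical helper, for illustration only. 'end' points one past
 * the last long to fill; nlongs is the 0..15 long remainder that
 * .Lmemset_partial handles by jumping into the middle of f_fill64. */
static void fill_tail_longs(unsigned long *end, unsigned long fill,
			    unsigned int nlongs)
{
	switch (nlongs) {	/* the asm computes this entry point, then jr t1 */
	case 15: end[-15] = fill; /* fall through */
	case 14: end[-14] = fill; /* fall through */
	case 13: end[-13] = fill; /* fall through */
	case 12: end[-12] = fill; /* fall through */
	case 11: end[-11] = fill; /* fall through */
	case 10: end[-10] = fill; /* fall through */
	case  9: end[-9]  = fill; /* fall through */
	case  8: end[-8]  = fill; /* fall through */
	case  7: end[-7]  = fill; /* fall through */
	case  6: end[-6]  = fill; /* fall through */
	case  5: end[-5]  = fill; /* fall through */
	case  4: end[-4]  = fill; /* fall through */
	case  3: end[-3]  = fill; /* fall through */
	case  2: end[-2]  = fill; /* fall through */
	case  1: end[-1]  = fill; /* fall through */
	case  0: break;
	}
}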