Linux Audio

Check our new training course

Loading...
v6.8
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
 11#include <linux/export.h>
 12#include <asm/asm.h>
 13#include <asm/asm-offsets.h>
 14#include <asm/regdef.h>
 15
 16#if LONGSIZE == 4
 17#define LONG_S_L swl
 18#define LONG_S_R swr
 19#else
 20#define LONG_S_L sdl
 21#define LONG_S_R sdr
 22#endif
 23
 24#ifdef CONFIG_CPU_MICROMIPS
 25#define STORSIZE (LONGSIZE * 2)
 26#define STORMASK (STORSIZE - 1)
 27#define FILL64RG t8
 28#define FILLPTRG t7
 29#undef  LONG_S
 30#define LONG_S LONG_SP
 31#else
 32#define STORSIZE LONGSIZE
 33#define STORMASK LONGMASK
 34#define FILL64RG a1
 35#define FILLPTRG t0
 36#endif
 37
 38#define LEGACY_MODE 1
 39#define EVA_MODE    2
 40
 41/*
 42 * No need to protect it with EVA #ifdefery. The generated block of code
 43 * will never be assembled if EVA is not enabled.
 44 */
 45#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
 46#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
 47
 48#define EX(insn,reg,addr,handler)			\
 49	.if \mode == LEGACY_MODE;			\
 509:		insn	reg, addr;			\
 51	.else;						\
 529:		___BUILD_EVA_INSN(insn, reg, addr);	\
 53	.endif;						\
 54	.section __ex_table,"a";			\
 55	PTR_WD	9b, handler;				\
 56	.previous
 57
 58	.macro	f_fill64 dst, offset, val, fixup, mode
 59	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
 60	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
 61	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
 62	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
 63#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
 64	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
 65	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
 66	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
 67	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
 68#endif
 69#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
 70	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
 71	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
 72	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
 73	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
 74	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
 75	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
 76	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
 77	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
 78#endif
 79	.endm
 80
 
 81	.align	5
 82
 83	/*
 84	 * Macro to generate the __bzero{,_user} symbol
 85	 * Arguments:
 86	 * mode: LEGACY_MODE or EVA_MODE
 87	 */
 88	.macro __BUILD_BZERO mode
 89	/* Initialize __memset if this is the first time we call this macro */
 90	.ifnotdef __memset
 91	.set __memset, 1
 92	.hidden __memset /* Make sure it does not leak */
 93	.endif
 94
 95	sltiu		t0, a2, STORSIZE	/* very small region? */
 96	.set		noreorder
 97	bnez		t0, .Lsmall_memset\@
 98	 andi		t0, a0, STORMASK	/* aligned? */
 99	.set		reorder
100
101#ifdef CONFIG_CPU_MICROMIPS
102	move		t8, a1			/* used by 'swp' instruction */
103	move		t9, a1
104#endif
105	.set		noreorder
106#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
107	beqz		t0, 1f
108	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
109#else
110	.set		noat
111	li		AT, STORSIZE
112	beqz		t0, 1f
113	 PTR_SUBU	t0, AT			/* alignment in bytes */
114	.set		at
115#endif
116	.set		reorder
117
118#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
119	R10KCBARRIER(0(ra))
120#ifdef __MIPSEB__
121	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
122#else
123	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
124#endif
125	PTR_SUBU	a0, t0			/* long align ptr */
126	PTR_ADDU	a2, t0			/* correct size */
127
128#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
129#define STORE_BYTE(N)				\
130	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
131	.set		noreorder;		\
132	beqz		t0, 0f;			\
133	 PTR_ADDU	t0, 1;			\
134	.set		reorder;
135
136	PTR_ADDU	a2, t0			/* correct size */
137	PTR_ADDU	t0, 1
138	STORE_BYTE(0)
139	STORE_BYTE(1)
140#if LONGSIZE == 4
141	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
142#else
143	STORE_BYTE(2)
144	STORE_BYTE(3)
145	STORE_BYTE(4)
146	STORE_BYTE(5)
147	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
148#endif
1490:
150	ori		a0, STORMASK
151	xori		a0, STORMASK
152	PTR_ADDIU	a0, STORSIZE
153#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1541:	ori		t1, a2, 0x3f		/* # of full blocks */
155	xori		t1, 0x3f
156	andi		t0, a2, 0x40-STORSIZE
157	beqz		t1, .Lmemset_partial\@	/* no block to fill */
 
158
159	PTR_ADDU	t1, a0			/* end address */
 
1601:	PTR_ADDIU	a0, 64
161	R10KCBARRIER(0(ra))
162	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
163	bne		t1, a0, 1b
 
164
165.Lmemset_partial\@:
166	R10KCBARRIER(0(ra))
167	PTR_LA		t1, 2f			/* where to start */
168#ifdef CONFIG_CPU_MICROMIPS
169	LONG_SRL	t7, t0, 1
170#endif
171#if LONGSIZE == 4
172	PTR_SUBU	t1, FILLPTRG
173#else
174	.set		noat
175	LONG_SRL	AT, FILLPTRG, 1
176	PTR_SUBU	t1, AT
177	.set		at
178#endif
179	PTR_ADDU	a0, t0			/* dest ptr */
180	jr		t1
 
181
 
 
 
182	/* ... but first do longs ... */
183	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
1842:	andi		a2, STORMASK		/* At most one long to go */
 
185
186	.set		noreorder
187	beqz		a2, 1f
188#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
189	 PTR_ADDU	a0, a2			/* What's left */
190	.set		reorder
191	R10KCBARRIER(0(ra))
192#ifdef __MIPSEB__
193	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
194#else
195	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
196#endif
197#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
198	 PTR_SUBU	t0, $0, a2
199	.set		reorder
200	move		a2, zero		/* No remaining longs */
201	PTR_ADDIU	t0, 1
202	STORE_BYTE(0)
203	STORE_BYTE(1)
204#if LONGSIZE == 4
205	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
206#else
207	STORE_BYTE(2)
208	STORE_BYTE(3)
209	STORE_BYTE(4)
210	STORE_BYTE(5)
211	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
212#endif
2130:
214#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
2151:	move		a2, zero
216	jr		ra
217
218.Lsmall_memset\@:
219	PTR_ADDU	t1, a0, a2
220	beqz		a2, 2f
 
221
2221:	PTR_ADDIU	a0, 1			/* fill bytewise */
223	R10KCBARRIER(0(ra))
224	.set		noreorder
225	bne		t1, a0, 1b
226	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
227	.set		reorder
228
2292:	move		a2, zero
230	jr		ra			/* done */
231	.if __memset == 1
232	END(memset)
233	.set __memset, 0
234	.hidden __memset
235	.endif
236
237#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
238.Lbyte_fixup\@:
239	/*
240	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
241	 *      a2     =             a2                -              t0                   + 1
242	 */
243	PTR_SUBU	a2, t0
244	PTR_ADDIU	a2, 1
245	jr		ra
246#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
247
248.Lfirst_fixup\@:
249	/* unset_bytes already in a2 */
250	jr	ra
 
251
252.Lfwd_fixup\@:
253	/*
254	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
255	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
256	 */
257	PTR_L		t0, TI_TASK($28)
258	andi		a2, 0x3f
259	LONG_L		t0, THREAD_BUADDR(t0)
260	LONG_ADDU	a2, t1
261	LONG_SUBU	a2, t0
262	jr		ra
 
263
264.Lpartial_fixup\@:
265	/*
266	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
267	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
268	 */
269	PTR_L		t0, TI_TASK($28)
270	andi		a2, STORMASK
271	LONG_L		t0, THREAD_BUADDR(t0)
272	LONG_ADDU	a2, a0
273	LONG_SUBU	a2, t0
274	jr		ra
 
275
276.Llast_fixup\@:
277	/* unset_bytes already in a2 */
278	jr		ra
279
280.Lsmall_fixup\@:
281	/*
282	 * unset_bytes = end_addr - current_addr + 1
283	 *      a2     =    t1    -      a0      + 1
284	 */
285	PTR_SUBU	a2, t1, a0
286	PTR_ADDIU	a2, 1
287	jr		ra
 
288
289	.endm
290
291/*
292 * memset(void *s, int c, size_t n)
293 *
294 * a0: start of area to clear
295 * a1: char to fill with
296 * a2: size of area to clear
297 */
298
299LEAF(memset)
300EXPORT_SYMBOL(memset)
301	move		v0, a0			/* result */
302	beqz		a1, 1f
 
303
304	andi		a1, 0xff		/* spread fillword */
305	LONG_SLL		t1, a1, 8
306	or		a1, t1
307	LONG_SLL		t1, a1, 16
308#if LONGSIZE == 8
309	or		a1, t1
310	LONG_SLL		t1, a1, 32
311#endif
312	or		a1, t1
3131:
314#ifndef CONFIG_EVA
315FEXPORT(__bzero)
316EXPORT_SYMBOL(__bzero)
 
317#endif
318	__BUILD_BZERO LEGACY_MODE
319
320#ifdef CONFIG_EVA
321LEAF(__bzero)
322EXPORT_SYMBOL(__bzero)
323	__BUILD_BZERO EVA_MODE
324END(__bzero)
325#endif
v4.6
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
 
 11#include <asm/asm.h>
 12#include <asm/asm-offsets.h>
 13#include <asm/regdef.h>
 14
 15#if LONGSIZE == 4
 16#define LONG_S_L swl
 17#define LONG_S_R swr
 18#else
 19#define LONG_S_L sdl
 20#define LONG_S_R sdr
 21#endif
 22
 23#ifdef CONFIG_CPU_MICROMIPS
 24#define STORSIZE (LONGSIZE * 2)
 25#define STORMASK (STORSIZE - 1)
 26#define FILL64RG t8
 27#define FILLPTRG t7
 28#undef  LONG_S
 29#define LONG_S LONG_SP
 30#else
 31#define STORSIZE LONGSIZE
 32#define STORMASK LONGMASK
 33#define FILL64RG a1
 34#define FILLPTRG t0
 35#endif
 36
 37#define LEGACY_MODE 1
 38#define EVA_MODE    2
 39
 40/*
 41 * No need to protect it with EVA #ifdefery. The generated block of code
 42 * will never be assembled if EVA is not enabled.
 43 */
 44#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
 45#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
 46
 47#define EX(insn,reg,addr,handler)			\
 48	.if \mode == LEGACY_MODE;			\
 499:		insn	reg, addr;			\
 50	.else;						\
 519:		___BUILD_EVA_INSN(insn, reg, addr);	\
 52	.endif;						\
 53	.section __ex_table,"a";			\
 54	PTR	9b, handler;				\
 55	.previous
 56
 57	.macro	f_fill64 dst, offset, val, fixup, mode
 58	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
 59	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
 60	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
 61	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
 62#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
 63	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
 64	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
 65	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
 66	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
 67#endif
 68#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
 69	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
 70	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
 71	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
 72	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
 73	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
 74	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
 75	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
 76	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
 77#endif
 78	.endm
 79
 80	.set	noreorder
 81	.align	5
 82
 83	/*
 84	 * Macro to generate the __bzero{,_user} symbol
 85	 * Arguments:
 86	 * mode: LEGACY_MODE or EVA_MODE
 87	 */
 88	.macro __BUILD_BZERO mode
 89	/* Initialize __memset if this is the first time we call this macro */
 90	.ifnotdef __memset
 91	.set __memset, 1
 92	.hidden __memset /* Make sure it does not leak */
 93	.endif
 94
 95	sltiu		t0, a2, STORSIZE	/* very small region? */
 
 96	bnez		t0, .Lsmall_memset\@
 97	andi		t0, a0, STORMASK	/* aligned? */
 
 98
 99#ifdef CONFIG_CPU_MICROMIPS
100	move		t8, a1			/* used by 'swp' instruction */
101	move		t9, a1
102#endif
 
103#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
104	beqz		t0, 1f
105	PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
106#else
107	.set		noat
108	li		AT, STORSIZE
109	beqz		t0, 1f
110	PTR_SUBU	t0, AT			/* alignment in bytes */
111	.set		at
112#endif
 
113
114#ifndef CONFIG_CPU_MIPSR6
115	R10KCBARRIER(0(ra))
116#ifdef __MIPSEB__
117	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
118#else
119	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
120#endif
121	PTR_SUBU	a0, t0			/* long align ptr */
122	PTR_ADDU	a2, t0			/* correct size */
123
124#else /* CONFIG_CPU_MIPSR6 */
125#define STORE_BYTE(N)				\
126	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
 
127	beqz		t0, 0f;			\
128	PTR_ADDU	t0, 1;
 
129
130	PTR_ADDU	a2, t0			/* correct size */
131	PTR_ADDU	t0, 1
132	STORE_BYTE(0)
133	STORE_BYTE(1)
134#if LONGSIZE == 4
135	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
136#else
137	STORE_BYTE(2)
138	STORE_BYTE(3)
139	STORE_BYTE(4)
140	STORE_BYTE(5)
141	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
142#endif
1430:
144	ori		a0, STORMASK
145	xori		a0, STORMASK
146	PTR_ADDIU	a0, STORSIZE
147#endif /* CONFIG_CPU_MIPSR6 */
1481:	ori		t1, a2, 0x3f		/* # of full blocks */
149	xori		t1, 0x3f
 
150	beqz		t1, .Lmemset_partial\@	/* no block to fill */
151	andi		t0, a2, 0x40-STORSIZE
152
153	PTR_ADDU	t1, a0			/* end address */
154	.set		reorder
1551:	PTR_ADDIU	a0, 64
156	R10KCBARRIER(0(ra))
157	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
158	bne		t1, a0, 1b
159	.set		noreorder
160
161.Lmemset_partial\@:
162	R10KCBARRIER(0(ra))
163	PTR_LA		t1, 2f			/* where to start */
164#ifdef CONFIG_CPU_MICROMIPS
165	LONG_SRL	t7, t0, 1
166#endif
167#if LONGSIZE == 4
168	PTR_SUBU	t1, FILLPTRG
169#else
170	.set		noat
171	LONG_SRL	AT, FILLPTRG, 1
172	PTR_SUBU	t1, AT
173	.set		at
174#endif
 
175	jr		t1
176	PTR_ADDU	a0, t0			/* dest ptr */
177
178	.set		push
179	.set		noreorder
180	.set		nomacro
181	/* ... but first do longs ... */
182	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
1832:	.set		pop
184	andi		a2, STORMASK		/* At most one long to go */
185
 
186	beqz		a2, 1f
187#ifndef CONFIG_CPU_MIPSR6
188	PTR_ADDU	a0, a2			/* What's left */
 
189	R10KCBARRIER(0(ra))
190#ifdef __MIPSEB__
191	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
192#else
193	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
194#endif
195#else
196	PTR_SUBU	t0, $0, a2
 
 
197	PTR_ADDIU	t0, 1
198	STORE_BYTE(0)
199	STORE_BYTE(1)
200#if LONGSIZE == 4
201	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
202#else
203	STORE_BYTE(2)
204	STORE_BYTE(3)
205	STORE_BYTE(4)
206	STORE_BYTE(5)
207	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
208#endif
2090:
210#endif
2111:	jr		ra
212	move		a2, zero
213
214.Lsmall_memset\@:
 
215	beqz		a2, 2f
216	PTR_ADDU	t1, a0, a2
217
2181:	PTR_ADDIU	a0, 1			/* fill bytewise */
219	R10KCBARRIER(0(ra))
 
220	bne		t1, a0, 1b
221	sb		a1, -1(a0)
 
222
2232:	jr		ra			/* done */
224	move		a2, zero
225	.if __memset == 1
226	END(memset)
227	.set __memset, 0
228	.hidden __memset
229	.endif
230
 
231.Lbyte_fixup\@:
232	PTR_SUBU	a2, $0, t0
 
 
 
 
 
233	jr		ra
234	 PTR_ADDIU	a2, 1
235
236.Lfirst_fixup\@:
 
237	jr	ra
238	nop
239
240.Lfwd_fixup\@:
 
 
 
 
241	PTR_L		t0, TI_TASK($28)
242	andi		a2, 0x3f
243	LONG_L		t0, THREAD_BUADDR(t0)
244	LONG_ADDU	a2, t1
 
245	jr		ra
246	LONG_SUBU	a2, t0
247
248.Lpartial_fixup\@:
 
 
 
 
249	PTR_L		t0, TI_TASK($28)
250	andi		a2, STORMASK
251	LONG_L		t0, THREAD_BUADDR(t0)
252	LONG_ADDU	a2, t1
 
253	jr		ra
254	LONG_SUBU	a2, t0
255
256.Llast_fixup\@:
 
 
 
 
 
 
 
 
 
 
257	jr		ra
258	andi		v1, a2, STORMASK
259
260	.endm
261
262/*
263 * memset(void *s, int c, size_t n)
264 *
265 * a0: start of area to clear
266 * a1: char to fill with
267 * a2: size of area to clear
268 */
269
270LEAF(memset)
 
 
271	beqz		a1, 1f
272	move		v0, a0			/* result */
273
274	andi		a1, 0xff		/* spread fillword */
275	LONG_SLL		t1, a1, 8
276	or		a1, t1
277	LONG_SLL		t1, a1, 16
278#if LONGSIZE == 8
279	or		a1, t1
280	LONG_SLL		t1, a1, 32
281#endif
282	or		a1, t1
2831:
284#ifndef CONFIG_EVA
285FEXPORT(__bzero)
286#else
287FEXPORT(__bzero_kernel)
288#endif
289	__BUILD_BZERO LEGACY_MODE
290
291#ifdef CONFIG_EVA
292LEAF(__bzero)
 
293	__BUILD_BZERO EVA_MODE
294END(__bzero)
295#endif