v4.17 — arch/x86/include/asm/string_32.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

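/*
 * Copy n bytes from "from" to "to": n/4 dwords with "rep movsl",
 * then the remaining n & 3 bytes with "rep movsb".
 */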
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
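/*
 * With a constant n the switch above collapses completely: e.g.
 * n == 6 becomes one 32-bit store plus one 16-bit store, with no
 * branch or loop left in the generated code.
 */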

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (e.g. AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))

#else

/*
 *	No 3D Now!
 */

#if (__GNUC__ >= 4)
#define memcpy(t, f, n) __builtin_memcpy(t, f, n)
#else
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy((t), (f), (n))	\
	 : __memcpy((t), (f), (n)))
#endif

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time.
 */
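/*
 * Stores count/4 dwords with "rep stosl", then uses bit 1 of count for
 * a trailing "stosw" and bit 0 for a trailing "stosb", covering every
 * count % 4 tail.
 */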
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}

/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant.
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

	{
		int d0, d1;
#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
		/* Workaround for broken gcc 4.0 */
		register unsigned long eax asm("%eax") = pattern;
#else
		unsigned long eax = pattern;
#endif

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}
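/*
 * Example: a constant memset(s, 0xab, 10) reaches this helper with
 * pattern 0xabababab and count 10, i.e. two "rep stosl" iterations
 * plus one trailing "stosw".
 */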

#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#if (__GNUC__ >= 4)
#define memset(s, c, count) __builtin_memset(s, c, count)
#else
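/*
 * 0x01010101UL * (unsigned char)(c) replicates the fill byte into all
 * four byte lanes, e.g. c == 0xab gives the pattern 0xabababab.
 */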
#define memset(s, c, count)						\
	(__builtin_constant_p(c)					\
	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
				 (count))				\
	 : __memset((s), (c), (count)))
#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
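/*
 * e.g. memset16(buf, 0xdead, 64) fills 64 uint16_t elements; memset32()
 * does the same with 32-bit stores.
 */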

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);
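/* e.g. memscan(buf, 0, size) returns the first zero byte, or buf + size. */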

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */
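
Both the memcpy and memset wrappers above hinge on __builtin_constant_p: a compile-time-constant length is routed to a helper the compiler can fully unroll, while a runtime length falls through to the rep-string routine. Below is a minimal, self-contained sketch of that dispatch pattern; the helper names (copy_constant, copy_variable, my_memcpy) are hypothetical stand-ins, not kernel functions, and the sketch assumes a compiler (gcc/clang) that provides __builtin_constant_p.

#include <stddef.h>
#include <string.h>

/* Hypothetical stand-in for the constant-size path (__constant_memcpy):
 * with a literal n the compiler can reduce it to a few direct stores. */
static inline void *copy_constant(void *to, const void *from, size_t n)
{
	return memcpy(to, from, n);
}

/* Hypothetical stand-in for the runtime path (__memcpy / rep movsl). */
static inline void *copy_variable(void *to, const void *from, size_t n)
{
	return memcpy(to, from, n);
}

/* Same shape as the header's memcpy macro. */
#define my_memcpy(t, f, n)			\
	(__builtin_constant_p(n)		\
	 ? copy_constant((t), (f), (n))		\
	 : copy_variable((t), (f), (n)))

int main(void)
{
	char src[8] = "example", dst[8];
	size_t n;

	my_memcpy(dst, src, sizeof(src));	/* constant size: first branch */

	n = dst[0] ? sizeof(src) : 1;		/* runtime size: second branch */
	my_memcpy(dst, src, n);
	return 0;
}
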
v6.2 — arch/x86/include/asm/string_32.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */
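
Comparing the two revisions: between v4.17 and v6.2 the header dropped the CONFIG_X86_USE_3DNOW memcpy variants, the pre-gcc-4 fallback macros, and the constant-pattern memset helpers (__constant_c_memset, __constant_c_and_count_memset, __constant_c_x_memset). With CONFIG_FORTIFY_SOURCE disabled, memcpy and memset now map directly to __builtin_memcpy and __builtin_memset, leaving unrolling and inlining decisions to the compiler; only __memcpy/__constant_memcpy, __memset_generic, and the 16/32-bit fill helpers keep their hand-written rep-string implementations.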