v6.2
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */

/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#include <linux/kmsan_string.h>
#endif

#define __HAVE_ARCH_MEMCPY 1
#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#undef memcpy
#define memcpy __msan_memcpy
#else
extern void *memcpy(void *to, const void *from, size_t len);
#endif
extern void *__memcpy(void *to, const void *from, size_t len);

#define __HAVE_ARCH_MEMSET
#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
extern void *__msan_memset(void *s, int c, size_t n);
#undef memset
#define memset __msan_memset
#else
void *memset(void *s, int c, size_t n);
#endif
void *__memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET64
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosq"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMMOVE
#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#undef memmove
void *__msan_memmove(void *dest, const void *src, size_t len);
#define memmove __msan_memmove
#else
void *memmove(void *dest, const void *src, size_t count);
#endif
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the non-instrumented versions of the mem* functions.
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#undef memmove
#define memmove(dst, src, len) __memmove(dst, src, len)
#undef memset
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#endif

#ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
#define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
{
	if (__builtin_constant_p(cnt)) {
		switch (cnt) {
			case 4:
				asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
				return;
			case 8:
				asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
				return;
			case 16:
				asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
				asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
				return;
		}
	}
	__memcpy_flushcache(dst, src, cnt);
}
#endif

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */
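The memset16/32/64 helpers above are the same trick at three widths: the fill value is pinned to AX/EAX/RAX by the "a" constraint, the count is tied to RCX (input "0" reuses the "=&c" output) and the destination to RDI (input "1" reuses "=&D"), which is exactly the register layout rep stosw/stosl/stosq expects. The registers are declared as early-clobber dummy outputs because rep stos advances RDI and counts RCX down to zero as it runs. A minimal userspace sketch of the same idiom, compilable with GCC or Clang on x86-64 (fill16 and the RGB565 scanline are illustrative assumptions, not kernel API):

#include <stdint.h>
#include <stdio.h>

/* Userspace copy of the kernel's memset16() idiom: "rep stosw" stores AX
 * at [RDI] RCX times, so count and destination are passed through the
 * "=&c"/"=&D" dummy outputs that the instruction is going to clobber. */
static inline void *fill16(uint16_t *s, uint16_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

int main(void)
{
	uint16_t line[8];
	size_t i;

	fill16(line, 0xf81f, 8);		/* fill a pretend RGB565 scanline */
	for (i = 0; i < 8; i++)
		printf("%04x ", line[i]);	/* prints "f81f" eight times */
	printf("\n");
	return 0;
}

memcpy_flushcache() uses inline asm for a different reason: movnti is a non-temporal store that bypasses the cache, so the constant-size 4/8/16-byte copies need no separate cache flush afterwards, while every other size falls back to the out-of-line __memcpy_flushcache().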
v4.6
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__

/* Written 2002 by Andi Kleen */

/* Only used for special circumstances. Stolen from i386/string.h */
/* Copies n/4 dwords with "rep ; movsl", then tests bit 1 and bit 0 of n
   to decide whether a trailing "movsw" and/or "movsb" is needed. */
static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
{
	unsigned long d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "testb $2,%b4\n\t"
		     "je 1f\n\t"
		     "movsw\n"
		     "1:\ttestb $1,%b4\n\t"
		     "je 2f\n\t"
		     "movsb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}

/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

#define __HAVE_ARCH_MEMCPY 1
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

#ifndef CONFIG_KMEMCHECK
#if (__GNUC__ == 4 && __GNUC_MINOR__ < 3) || __GNUC__ < 4
#define memcpy(dst, src, len)					\
({								\
	size_t __len = (len);					\
	void *__ret;						\
	if (__builtin_constant_p(len) && __len >= 64)		\
		__ret = __memcpy((dst), (src), __len);		\
	else							\
		__ret = __builtin_memcpy((dst), (src), __len);	\
	__ret;							\
})
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
#endif

#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the non-instrumented versions of the mem* functions.
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#endif

/**
 * memcpy_mcsafe - copy memory with indication whether a machine check happened
 *
 * @dst:	destination address
 * @src:	source address
 * @cnt:	number of bytes to copy
 *
 * Low level memory copy function that catches machine checks
 *
 * Return 0 for success, -EFAULT on failure
 */
int memcpy_mcsafe(void *dst, const void *src, size_t cnt);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */
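The notable extra in this older version is memcpy_mcsafe(), which, per its docstring, catches machine checks raised during the copy and turns them into an error return instead of a panic, so callers must check the return value rather than assume the copy completed. A hedged sketch of the calling pattern (pmem_read() and its error policy are hypothetical, not part of this header):

#include <linux/string.h>
#include <linux/errno.h>

/* Hypothetical consumer: copy a block out of persistent memory, where a
 * poisoned cache line raises a machine check instead of a page fault. */
static int pmem_read(void *dst, const void *pmem_addr, size_t len)
{
	/* memcpy_mcsafe() returns 0 on success, -EFAULT if a machine
	 * check fired; treat dst as unreliable in the failure case. */
	if (memcpy_mcsafe(dst, pmem_addr, len))
		return -EIO;	/* surface the media error to the caller */
	return 0;
}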