Loading...
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * S390 version
4 * Copyright IBM Corp. 1999
5 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
6 */
7
8#ifndef _S390_STRING_H_
9#define _S390_STRING_H_
10
11#ifndef _LINUX_TYPES_H
12#include <linux/types.h>
13#endif
14
/* Always arch-provided; gcc may also expand its builtins inline. */
#define __HAVE_ARCH_MEMCPY /* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE /* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET /* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET16 /* arch function */
#define __HAVE_ARCH_MEMSET32 /* arch function */
#define __HAVE_ARCH_MEMSET64 /* arch function */

/* Prototypes for the out-of-line arch implementations. */
void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);

/*
 * The functions below are only advertised as arch-provided when KASAN
 * is off; KASAN builds presumably fall back to the generic
 * (instrumented) implementations -- NOTE(review): confirm against
 * lib/string.c.
 */
#ifndef CONFIG_KASAN
#define __HAVE_ARCH_MEMCHR /* inline & arch function */
#define __HAVE_ARCH_MEMCMP /* arch function */
#define __HAVE_ARCH_MEMSCAN /* inline & arch function */
#define __HAVE_ARCH_STRCAT /* inline & arch function */
#define __HAVE_ARCH_STRCMP /* arch function */
#define __HAVE_ARCH_STRCPY /* inline & arch function */
#define __HAVE_ARCH_STRLCAT /* arch function */
#define __HAVE_ARCH_STRLEN /* inline & arch function */
#define __HAVE_ARCH_STRNCAT /* arch function */
#define __HAVE_ARCH_STRNCPY /* arch function */
#define __HAVE_ARCH_STRNLEN /* inline & arch function */
#define __HAVE_ARCH_STRSTR /* arch function */

/* Prototypes for non-inlined arch strings functions. */
int memcmp(const void *s1, const void *s2, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strstr(const char *s1, const char *s2);
#endif /* !CONFIG_KASAN */

/* Always use the generic versions of these. */
#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN
55
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/* Uninstrumented variants of the mem* functions. */
extern void *__memcpy(void *dest, const void *src, size_t n);
extern void *__memset(void *s, int c, size_t n);
extern void *__memmove(void *dest, const void *src, size_t n);

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the non-instrumented versions of the mem* functions.
 */

#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#define strlen(s) __strlen(s)

/* Make the inline string helpers below emit their __-prefixed names. */
#define __no_sanitize_prefix_strfunc(x) __##x

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#else
/* Instrumented (or KASAN-less) build: plain names, no redirection. */
#define __no_sanitize_prefix_strfunc(x) x
#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */

/* The __memsetN() primitives take a size in bytes; the memsetN()
 * wrappers below convert from an element count. */
void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);
85
/**
 * memset16 - fill an array of uint16_t with a 16-bit value
 * @s: start of the area
 * @v: value to fill with
 * @count: number of 16-bit elements (not bytes)
 *
 * Thin wrapper: __memset16() expects a byte count, so scale the
 * element count by the element size before delegating.
 */
static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	size_t bytes = count * sizeof(v);

	return __memset16(s, v, bytes);
}
90
/**
 * memset32 - fill an array of uint32_t with a 32-bit value
 * @s: start of the area
 * @v: value to fill with
 * @count: number of 32-bit elements (not bytes)
 *
 * Thin wrapper: __memset32() expects a byte count, so scale the
 * element count by the element size before delegating.
 */
static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	size_t bytes = count * sizeof(v);

	return __memset32(s, v, bytes);
}
95
/**
 * memset64 - fill an array of uint64_t with a 64-bit value
 * @s: start of the area
 * @v: value to fill with
 * @count: number of 64-bit elements (not bytes)
 *
 * Thin wrapper: __memset64() expects a byte count, so scale the
 * element count by the element size before delegating.
 */
static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	size_t bytes = count * sizeof(v);

	return __memset64(s, v, bytes);
}
100
#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

/*
 * Inline definitions are suppressed for the translation unit that
 * defines IN_ARCH_STRING_C (it supplies the out-of-line versions) and
 * for fortified builds; those get plain prototypes instead (see the
 * #else branch below).
 */
#ifdef __HAVE_ARCH_MEMCHR
/*
 * memchr - find the first occurrence of byte @c in the @n bytes at @s;
 * returns its address, or NULL if it does not occur.
 *
 * Uses the SRST (SEARCH STRING) instruction: the byte to search for is
 * placed in general register 0, %[s] is the start address and %[ret]
 * the end address.  SRST may stop early (CC 3), so "jo 0b" resumes it.
 * CC 1 ("jl") means found, with the address left in %[ret]; otherwise
 * %[ret] is zeroed to yield NULL.  Clobbers r0 and the condition code.
 */
static inline void *memchr(const void * s, int c, size_t n)
{
	const void *ret = s + n;	/* scan limit (one past the last byte) */

	asm volatile(
		" lgr 0,%[c]\n"
		"0: srst %[ret],%[s]\n"
		" jo 0b\n"
		" jl 1f\n"
		" la %[ret],0\n"
		"1:"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif
121
#ifdef __HAVE_ARCH_MEMSCAN
/*
 * memscan - like memchr(), but when @c is not found among the @n bytes
 * at @s, return the end address @s + @n instead of NULL.
 *
 * SRST searches for the byte in r0; "jo 0b" resumes after a
 * CPU-determined early stop (CC 3).  Clobbers r0 and the condition code.
 */
static inline void *memscan(void *s, int c, size_t n)
{
	const void *ret = s + n;	/* scan limit; also the "not found" result */

	asm volatile(
		" lgr 0,%[c]\n"
		"0: srst %[ret],%[s]\n"
		" jo 0b\n"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif
137
#ifdef __HAVE_ARCH_STRCAT
/*
 * strcat - append the NUL-terminated string @src to the end of @dst.
 *
 * Two steps, both with r0 = 0 (the terminator byte):
 *  1. SRST locates the terminating NUL of @dst, leaving its address
 *     in %[dummy] (the end address of 0 makes the scan unbounded).
 *  2. MVST copies @src, including its NUL, to that position.
 * Both instructions may stop early (CC 3), hence the "jo" resume
 * loops.  Returns the original @dst.  Clobbers r0 and the condition
 * code.
 */
static inline char *strcat(char *dst, const char *src)
{
	unsigned long dummy = 0;
	char *ret = dst;

	asm volatile(
		" lghi 0,0\n"
		"0: srst %[dummy],%[dst]\n"
		" jo 0b\n"
		"1: mvst %[dummy],%[src]\n"
		" jo 1b"
		: [dummy] "+&a" (dummy), [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif
156
#ifdef __HAVE_ARCH_STRCPY
/*
 * strcpy - copy the NUL-terminated string @src to @dst.
 *
 * MVST copies bytes until it has moved the terminator held in r0
 * (zero); "jo 0b" resumes after a CPU-determined early stop (CC 3).
 * Returns the original @dst.  Clobbers r0 and the condition code.
 */
static inline char *strcpy(char *dst, const char *src)
{
	char *ret = dst;

	asm volatile(
		" lghi 0,0\n"
		"0: mvst %[dst],%[src]\n"
		" jo 0b"
		: [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif
172
#if defined(__HAVE_ARCH_STRLEN) || (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
/*
 * strlen - length of the NUL-terminated string @s, excluding the NUL.
 *
 * In KASAN builds of non-instrumented files this function is emitted
 * as __strlen() (see __no_sanitize_prefix_strfunc), which is where the
 * strlen() macro in the KASAN section redirects to.
 *
 * SRST searches for the byte in r0 (zero) starting at @s with an end
 * address of 0, i.e. effectively unbounded -- the string must be
 * properly terminated.  "jo 0b" resumes after a CPU-determined early
 * stop (CC 3).  Clobbers r0 and the condition code.
 */
static inline size_t __no_sanitize_prefix_strfunc(strlen)(const char *s)
{
	unsigned long end = 0;
	const char *tmp = s;

	asm volatile(
		" lghi 0,0\n"
		"0: srst %[end],%[tmp]\n"
		" jo 0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - (unsigned long)s;	/* address of NUL minus start */
}
#endif
189
#ifdef __HAVE_ARCH_STRNLEN
/*
 * strnlen - length of the string @s, but at most @n.
 *
 * SRST searches for the NUL byte (r0 = 0) from @s up to the end
 * address @s + @n; %[end] finishes at the NUL or at the limit,
 * whichever comes first, so the result is min(strlen(s), n).
 * "jo 0b" resumes after a CPU-determined early stop (CC 3).
 * Clobbers r0 and the condition code.
 */
static inline size_t strnlen(const char * s, size_t n)
{
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		" lghi 0,0\n"
		"0: srst %[end],%[tmp]\n"
		" jo 0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - s;
}
#endif
#else /* IN_ARCH_STRING_C */
/*
 * The translation unit providing the out-of-line definitions
 * (IN_ARCH_STRING_C) and fortified builds see plain prototypes
 * instead of the inline versions above.
 */
void *memchr(const void * s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char * s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* _S390_STRING_H_ */
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * S390 version
4 * Copyright IBM Corp. 1999
5 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
6 */
7
8#ifndef _S390_STRING_H_
9#define _S390_STRING_H_
10
11#ifndef _LINUX_TYPES_H
12#include <linux/types.h>
13#endif
14
/* Always arch-provided; gcc may also expand its builtins inline. */
#define __HAVE_ARCH_MEMCPY /* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE /* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET /* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET16 /* arch function */
#define __HAVE_ARCH_MEMSET32 /* arch function */
#define __HAVE_ARCH_MEMSET64 /* arch function */

/* Prototypes for the out-of-line arch implementations. */
void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);

/*
 * The functions below are only advertised as arch-provided when KASAN
 * is off; KASAN builds presumably fall back to the generic
 * (instrumented) implementations -- NOTE(review): confirm against
 * lib/string.c.
 */
#ifndef CONFIG_KASAN
#define __HAVE_ARCH_MEMCHR /* inline & arch function */
#define __HAVE_ARCH_MEMCMP /* arch function */
#define __HAVE_ARCH_MEMSCAN /* inline & arch function */
#define __HAVE_ARCH_STRCAT /* inline & arch function */
#define __HAVE_ARCH_STRCMP /* arch function */
#define __HAVE_ARCH_STRCPY /* inline & arch function */
#define __HAVE_ARCH_STRLCAT /* arch function */
#define __HAVE_ARCH_STRLEN /* inline & arch function */
#define __HAVE_ARCH_STRNCAT /* arch function */
#define __HAVE_ARCH_STRNCPY /* arch function */
#define __HAVE_ARCH_STRNLEN /* inline & arch function */
#define __HAVE_ARCH_STRSTR /* arch function */

/* Prototypes for non-inlined arch strings functions. */
int memcmp(const void *s1, const void *s2, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strstr(const char *s1, const char *s2);
#endif /* !CONFIG_KASAN */

/* Always use the generic versions of these. */
#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN
55
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/* Redirect to the uninstrumented __strlen() for non-instrumented files. */
#define strlen(s) __strlen(s)

/* Make the inline string helpers below emit their __-prefixed names. */
#define __no_sanitize_prefix_strfunc(x) __##x

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#else
/* Instrumented (or KASAN-less) build: plain names, no redirection. */
#define __no_sanitize_prefix_strfunc(x) x
#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */

/* Uninstrumented / low-level arch implementations.  The __memsetN()
 * primitives take a size in bytes; the memsetN() wrappers below
 * convert from an element count. */
void *__memcpy(void *dest, const void *src, size_t n);
void *__memset(void *s, int c, size_t n);
void *__memmove(void *dest, const void *src, size_t n);
void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);
76
/**
 * memset16 - fill an array of uint16_t with a 16-bit value
 * @s: start of the area
 * @v: value to fill with
 * @count: number of 16-bit elements (not bytes)
 *
 * Thin wrapper: __memset16() expects a byte count, so scale the
 * element count by the element size before delegating.
 */
static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	size_t bytes = count * sizeof(v);

	return __memset16(s, v, bytes);
}
81
/**
 * memset32 - fill an array of uint32_t with a 32-bit value
 * @s: start of the area
 * @v: value to fill with
 * @count: number of 32-bit elements (not bytes)
 *
 * Thin wrapper: __memset32() expects a byte count, so scale the
 * element count by the element size before delegating.
 */
static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	size_t bytes = count * sizeof(v);

	return __memset32(s, v, bytes);
}
86
/**
 * memset64 - fill an array of uint64_t with a 64-bit value
 * @s: start of the area
 * @v: value to fill with
 * @count: number of 64-bit elements (not bytes)
 *
 * Thin wrapper: __memset64() expects a byte count, so scale the
 * element count by the element size before delegating.
 */
static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	size_t bytes = count * sizeof(v);

	return __memset64(s, v, bytes);
}
91
#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

/*
 * Inline definitions are suppressed for the translation unit that
 * defines IN_ARCH_STRING_C (it supplies the out-of-line versions) and
 * for fortified builds; those get plain prototypes instead (see the
 * #else branch below).
 */
#ifdef __HAVE_ARCH_MEMCHR
/*
 * memchr - find the first occurrence of byte @c in the @n bytes at @s;
 * returns its address, or NULL if it does not occur.
 *
 * Uses the SRST (SEARCH STRING) instruction: the byte to search for is
 * placed in general register 0, %[s] is the start address and %[ret]
 * the end address.  SRST may stop early (CC 3), so "jo 0b" resumes it.
 * CC 1 ("jl") means found, with the address left in %[ret]; otherwise
 * %[ret] is zeroed to yield NULL.  Clobbers r0 and the condition code.
 */
static inline void *memchr(const void * s, int c, size_t n)
{
	const void *ret = s + n;	/* scan limit (one past the last byte) */

	asm volatile(
		" lgr 0,%[c]\n"
		"0: srst %[ret],%[s]\n"
		" jo 0b\n"
		" jl 1f\n"
		" la %[ret],0\n"
		"1:"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif
112
#ifdef __HAVE_ARCH_MEMSCAN
/*
 * memscan - like memchr(), but when @c is not found among the @n bytes
 * at @s, return the end address @s + @n instead of NULL.
 *
 * SRST searches for the byte in r0; "jo 0b" resumes after a
 * CPU-determined early stop (CC 3).  Clobbers r0 and the condition code.
 */
static inline void *memscan(void *s, int c, size_t n)
{
	const void *ret = s + n;	/* scan limit; also the "not found" result */

	asm volatile(
		" lgr 0,%[c]\n"
		"0: srst %[ret],%[s]\n"
		" jo 0b\n"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif
128
#ifdef __HAVE_ARCH_STRCAT
/*
 * strcat - append the NUL-terminated string @src to the end of @dst.
 *
 * Two steps, both with r0 = 0 (the terminator byte):
 *  1. SRST locates the terminating NUL of @dst, leaving its address
 *     in %[dummy] (the end address of 0 makes the scan unbounded).
 *  2. MVST copies @src, including its NUL, to that position.
 * Both instructions may stop early (CC 3), hence the "jo" resume
 * loops.  Returns the original @dst.  Clobbers r0 and the condition
 * code.
 */
static inline char *strcat(char *dst, const char *src)
{
	unsigned long dummy = 0;
	char *ret = dst;

	asm volatile(
		" lghi 0,0\n"
		"0: srst %[dummy],%[dst]\n"
		" jo 0b\n"
		"1: mvst %[dummy],%[src]\n"
		" jo 1b"
		: [dummy] "+&a" (dummy), [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif
147
#ifdef __HAVE_ARCH_STRCPY
/*
 * strcpy - copy the NUL-terminated string @src to @dst.
 *
 * MVST copies bytes until it has moved the terminator held in r0
 * (zero); "jo 0b" resumes after a CPU-determined early stop (CC 3).
 * Returns the original @dst.  Clobbers r0 and the condition code.
 */
static inline char *strcpy(char *dst, const char *src)
{
	char *ret = dst;

	asm volatile(
		" lghi 0,0\n"
		"0: mvst %[dst],%[src]\n"
		" jo 0b"
		: [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif
163
#if defined(__HAVE_ARCH_STRLEN) || (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
/*
 * strlen - length of the NUL-terminated string @s, excluding the NUL.
 *
 * In KASAN builds of non-instrumented files this function is emitted
 * as __strlen() (see __no_sanitize_prefix_strfunc), which is where the
 * strlen() macro in the KASAN section redirects to.
 *
 * SRST searches for the byte in r0 (zero) starting at @s with an end
 * address of 0, i.e. effectively unbounded -- the string must be
 * properly terminated.  "jo 0b" resumes after a CPU-determined early
 * stop (CC 3).  Clobbers r0 and the condition code.
 */
static inline size_t __no_sanitize_prefix_strfunc(strlen)(const char *s)
{
	unsigned long end = 0;
	const char *tmp = s;

	asm volatile(
		" lghi 0,0\n"
		"0: srst %[end],%[tmp]\n"
		" jo 0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - (unsigned long)s;	/* address of NUL minus start */
}
#endif
180
#ifdef __HAVE_ARCH_STRNLEN
/*
 * strnlen - length of the string @s, but at most @n.
 *
 * SRST searches for the NUL byte (r0 = 0) from @s up to the end
 * address @s + @n; %[end] finishes at the NUL or at the limit,
 * whichever comes first, so the result is min(strlen(s), n).
 * "jo 0b" resumes after a CPU-determined early stop (CC 3).
 * Clobbers r0 and the condition code.
 */
static inline size_t strnlen(const char * s, size_t n)
{
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		" lghi 0,0\n"
		"0: srst %[end],%[tmp]\n"
		" jo 0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - s;
}
#endif
#else /* IN_ARCH_STRING_C */
/*
 * The translation unit providing the out-of-line definitions
 * (IN_ARCH_STRING_C) and fortified builds see plain prototypes
 * instead of the inline versions above.
 */
void *memchr(const void * s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char * s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* _S390_STRING_H_ */