/* SPDX-License-Identifier: GPL-2.0 */
/*
 * S390 version
 * Copyright IBM Corp. 1999
 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 */

#ifndef _S390_STRING_H_
#define _S390_STRING_H_

#ifndef _LINUX_TYPES_H
#include <linux/types.h>
#endif

#define __HAVE_ARCH_MEMCPY	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET16	/* arch function */
#define __HAVE_ARCH_MEMSET32	/* arch function */
#define __HAVE_ARCH_MEMSET64	/* arch function */

void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);

#ifndef CONFIG_KASAN
#define __HAVE_ARCH_MEMCHR	/* inline & arch function */
#define __HAVE_ARCH_MEMCMP	/* arch function */
#define __HAVE_ARCH_MEMSCAN	/* inline & arch function */
#define __HAVE_ARCH_STRCAT	/* inline & arch function */
#define __HAVE_ARCH_STRCMP	/* arch function */
#define __HAVE_ARCH_STRCPY	/* inline & arch function */
#define __HAVE_ARCH_STRLCAT	/* arch function */
#define __HAVE_ARCH_STRLEN	/* inline & arch function */
#define __HAVE_ARCH_STRNCAT	/* arch function */
#define __HAVE_ARCH_STRNCPY	/* arch function */
#define __HAVE_ARCH_STRNLEN	/* inline & arch function */
#define __HAVE_ARCH_STRSTR	/* arch function */

/* Prototypes for non-inlined arch string functions. */
int memcmp(const void *s1, const void *s2, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strstr(const char *s1, const char *s2);
#endif /* !CONFIG_KASAN */

#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

extern void *__memcpy(void *dest, const void *src, size_t n);
extern void *__memset(void *s, int c, size_t n);
extern void *__memmove(void *dest, const void *src, size_t n);

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the non-instrumented versions of the mem* functions.
 */

#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#define strlen(s) __strlen(s)

#define __no_sanitize_prefix_strfunc(x) __##x
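
/*
 * Example expansion: with KASAN enabled and this file not instrumented,
 * __no_sanitize_prefix_strfunc(strlen) pastes to __strlen, so the inline
 * definition further below provides the uninstrumented __strlen() that
 * the strlen() macro above resolves to; without KASAN it expands to the
 * plain strlen name.
 */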

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#else
#define __no_sanitize_prefix_strfunc(x) x
#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */

void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);

static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	return __memset16(s, v, count * sizeof(v));
}

static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	return __memset32(s, v, count * sizeof(v));
}

static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	return __memset64(s, v, count * sizeof(v));
}
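
/*
 * Usage sketch (hypothetical buffer "buf"): memset32(buf, 0x12345678, 4)
 * stores the 32-bit pattern four times, i.e. __memset32() receives a byte
 * count of 4 * sizeof(uint32_t) == 16.
 */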

#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

#ifdef __HAVE_ARCH_MEMCHR
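/*
 * memchr() below uses the SRST (search string) instruction: the byte to
 * search for is loaded into register 0, SRST scans from %[s] towards the
 * end address held in %[ret], "jo" retries on partial completion (CC 3),
 * "jl" keeps the found address (CC 1), and the not-found case falls
 * through to return NULL.
 */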
static inline void *memchr(const void *s, int c, size_t n)
{
	const void *ret = s + n;

	asm volatile(
		"	lgr	0,%[c]\n"
		"0:	srst	%[ret],%[s]\n"
		"	jo	0b\n"
		"	jl	1f\n"
		"	la	%[ret],0\n"
		"1:"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif

#ifdef __HAVE_ARCH_MEMSCAN
static inline void *memscan(void *s, int c, size_t n)
{
	const void *ret = s + n;

	asm volatile(
		"	lgr	0,%[c]\n"
		"0:	srst	%[ret],%[s]\n"
		"	jo	0b\n"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif

#ifdef __HAVE_ARCH_STRCAT
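/*
 * strcat() below chains two string instructions: SRST first locates the
 * terminating NUL of dst (register 0 holds the NUL byte to search for),
 * then MVST copies bytes from src to that position until the NUL has
 * been moved; "jo" restarts either instruction after a partial
 * completion.
 */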
static inline char *strcat(char *dst, const char *src)
{
	unsigned long dummy = 0;
	char *ret = dst;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[dummy],%[dst]\n"
		"	jo	0b\n"
		"1:	mvst	%[dummy],%[src]\n"
		"	jo	1b"
		: [dummy] "+&a" (dummy), [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif

#ifdef __HAVE_ARCH_STRCPY
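/*
 * strcpy() below is a plain MVST loop: register 0 holds the NUL
 * terminator, and MVST copies src to dst until that byte has been moved,
 * retrying on partial completion.
 */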
static inline char *strcpy(char *dst, const char *src)
{
	char *ret = dst;

	asm volatile(
		"	lghi	0,0\n"
		"0:	mvst	%[dst],%[src]\n"
		"	jo	0b"
		: [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif

#if defined(__HAVE_ARCH_STRLEN) || (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
static inline size_t __no_sanitize_prefix_strfunc(strlen)(const char *s)
{
	unsigned long end = 0;
	const char *tmp = s;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[end],%[tmp]\n"
		"	jo	0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - (unsigned long)s;
}
#endif

#ifdef __HAVE_ARCH_STRNLEN
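/*
 * strnlen() below bounds the SRST search by starting %[end] at s + n: if
 * a NUL is found within n bytes, %[end] is updated to its address and the
 * length is returned; otherwise %[end] stays at s + n and the result is n.
 */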
static inline size_t strnlen(const char *s, size_t n)
{
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[end],%[tmp]\n"
		"	jo	0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - s;
}
#endif
#else /* IN_ARCH_STRING_C */
void *memchr(const void *s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char *s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* _S390_STRING_H_ */