/* SPDX-License-Identifier: GPL-2.0 */
/*
 *  S390 version
 *    Copyright IBM Corp. 1999
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 */

#ifndef _S390_STRING_H_
#define _S390_STRING_H_

#ifndef _LINUX_TYPES_H
#include <linux/types.h>
#endif

#define __HAVE_ARCH_MEMCPY	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET	/* gcc builtin & arch function */

void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);

#if !defined(CONFIG_KASAN) && !defined(CONFIG_KMSAN)
#define __HAVE_ARCH_MEMCHR	/* inline & arch function */
#define __HAVE_ARCH_MEMCMP	/* arch function */
#define __HAVE_ARCH_MEMSCAN	/* inline & arch function */
#define __HAVE_ARCH_STRCAT	/* inline & arch function */
#define __HAVE_ARCH_STRCMP	/* arch function */
#define __HAVE_ARCH_STRCPY	/* inline & arch function */
#define __HAVE_ARCH_STRLCAT	/* arch function */
#define __HAVE_ARCH_STRLEN	/* inline & arch function */
#define __HAVE_ARCH_STRNCAT	/* arch function */
#define __HAVE_ARCH_STRNCPY	/* arch function */
#define __HAVE_ARCH_STRNLEN	/* inline & arch function */
#define __HAVE_ARCH_STRSTR	/* arch function */
#define __HAVE_ARCH_MEMSET16	/* arch function */
#define __HAVE_ARCH_MEMSET32	/* arch function */
#define __HAVE_ARCH_MEMSET64	/* arch function */

/* Prototypes for non-inlined arch string functions. */
int memcmp(const void *s1, const void *s2, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strstr(const char *s1, const char *s2);
#endif /* !defined(CONFIG_KASAN) && !defined(CONFIG_KMSAN) */

#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN

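/*
 * When KASAN is enabled but the current translation unit is built without
 * address sanitization, __no_sanitize_prefix_strfunc() renames the inline
 * strlen() below to __strlen(), and strlen() is redirected to it so that
 * uninstrumented code keeps using the inline version directly.
 */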
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

#define strlen(s) __strlen(s)

#define __no_sanitize_prefix_strfunc(x) __##x

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#else
#define __no_sanitize_prefix_strfunc(x) x
#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */

void *__memcpy(void *dest, const void *src, size_t n);
void *__memset(void *s, int c, size_t n);
void *__memmove(void *dest, const void *src, size_t n);
void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);

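/*
 * memset16/32/64 take an element count, not a byte count; the wrappers
 * below scale by sizeof(v) because __memset16/32/64 operate on bytes.
 * For example, memset16(buf, 0x1234, ARRAY_SIZE(buf)) fills every 16-bit
 * element of a u16 buf[] array.
 */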
#ifdef __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	return __memset16(s, v, count * sizeof(v));
}
#endif

#ifdef __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	return __memset32(s, v, count * sizeof(v));
}
#endif

#ifdef __HAVE_ARCH_MEMSET64
#ifdef IN_BOOT_STRING_C
void *memset64(uint64_t *s, uint64_t v, size_t count);
#else
static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	return __memset64(s, v, count * sizeof(v));
}
#endif
#endif

#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

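/*
 * The inline versions below are only used when IN_ARCH_STRING_C (set by
 * the out-of-line implementation) is not defined and FORTIFY_SOURCE does
 * not intercept the calls.  memchr() loads the search character into GR0
 * and lets SRST scan [s, s + n): CC3 means the scan is incomplete and is
 * resumed, CC1 means the character was found and %[ret] holds its
 * address, and on CC2 (not found) %[ret] is cleared so NULL is returned.
 */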
#ifdef __HAVE_ARCH_MEMCHR
static inline void *memchr(const void * s, int c, size_t n)
{
	const void *ret = s + n;

	asm volatile(
		"	lgr	0,%[c]\n"
		"0:	srst	%[ret],%[s]\n"
		"	jo	0b\n"
		"	jl	1f\n"
		"	la	%[ret],0\n"
		"1:"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif

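/*
 * memscan() uses the same SRST loop as memchr() above but omits the
 * not-found handling: if c does not occur within the first n bytes the
 * unmodified end address is returned, i.e. s + n instead of NULL.
 */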
#ifdef __HAVE_ARCH_MEMSCAN
static inline void *memscan(void *s, int c, size_t n)
{
	const void *ret = s + n;

	asm volatile(
		"	lgr	0,%[c]\n"
		"0:	srst	%[ret],%[s]\n"
		"	jo	0b\n"
		: [ret] "+&a" (ret), [s] "+&a" (s)
		: [c] "d" (c)
		: "cc", "memory", "0");
	return (void *) ret;
}
#endif

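/*
 * strcat() first runs SRST with GR0 = 0 to locate the terminating NUL of
 * dst, then MVST appends src (including its NUL) at that position; both
 * instructions are restarted via "jo" while they report CC3 (incomplete).
 */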
#ifdef __HAVE_ARCH_STRCAT
static inline char *strcat(char *dst, const char *src)
{
	unsigned long dummy = 0;
	char *ret = dst;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[dummy],%[dst]\n"
		"	jo	0b\n"
		"1:	mvst	%[dummy],%[src]\n"
		"	jo	1b"
		: [dummy] "+&a" (dummy), [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif

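/*
 * strcpy() relies on MVST with GR0 = 0: the instruction copies src to dst
 * up to and including the terminating NUL, and the "jo" branch restarts
 * it whenever CC3 signals an incomplete move.
 */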
#ifdef __HAVE_ARCH_STRCPY
static inline char *strcpy(char *dst, const char *src)
{
	char *ret = dst;

	asm volatile(
		"	lghi	0,0\n"
		"0:	mvst	%[dst],%[src]\n"
		"	jo	0b"
		: [dst] "+&a" (dst), [src] "+&a" (src)
		:
		: "cc", "memory", "0");
	return ret;
}
#endif

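/*
 * strlen() (__strlen() under the KASAN mapping above) starts SRST with an
 * end address of 0, i.e. effectively unbounded, so the scan only stops at
 * the terminating NUL; the length is the found address minus s.
 */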
#if defined(__HAVE_ARCH_STRLEN) || (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
static inline size_t __no_sanitize_prefix_strfunc(strlen)(const char *s)
{
	unsigned long end = 0;
	const char *tmp = s;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[end],%[tmp]\n"
		"	jo	0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - (unsigned long)s;
}
#endif

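/*
 * strnlen() is the bounded variant: the SRST end address is s + n, so the
 * result is either the offset of the first NUL or n if no NUL occurs in
 * the first n bytes.
 */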
#ifdef __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t n)
{
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		"	lghi	0,0\n"
		"0:	srst	%[end],%[tmp]\n"
		"	jo	0b"
		: [end] "+&a" (end), [tmp] "+&a" (tmp)
		:
		: "cc", "memory", "0");
	return end - s;
}
#endif
#else /* IN_ARCH_STRING_C */
void *memchr(const void * s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char * s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* _S390_STRING_H_ */