/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_FORTIFY_STRING_H_
#define _LINUX_FORTIFY_STRING_H_

#include <linux/bitfield.h>
#include <linux/bug.h>
#include <linux/const.h>
#include <linux/limits.h>

#define __FORTIFY_INLINE extern __always_inline __gnu_inline __overloadable
#define __RENAME(x) __asm__(#x)

#define FORTIFY_REASON_DIR(r) FIELD_GET(BIT(0), r)
#define FORTIFY_REASON_FUNC(r) FIELD_GET(GENMASK(7, 1), r)
#define FORTIFY_REASON(func, write) (FIELD_PREP(BIT(0), write) | \
				     FIELD_PREP(GENMASK(7, 1), func))

/* Overridden by KUnit tests. */
#ifndef fortify_panic
# define fortify_panic(func, write, avail, size, retfail) \
	__fortify_panic(FORTIFY_REASON(func, write), avail, size)
#endif
#ifndef fortify_warn_once
# define fortify_warn_once(x...) WARN_ONCE(x)
#endif

#define FORTIFY_READ 0
#define FORTIFY_WRITE 1

#define EACH_FORTIFY_FUNC(macro) \
	macro(strncpy), \
	macro(strnlen), \
	macro(strlen), \
	macro(strscpy), \
	macro(strlcat), \
	macro(strcat), \
	macro(strncat), \
	macro(memset), \
	macro(memcpy), \
	macro(memmove), \
	macro(memscan), \
	macro(memcmp), \
	macro(memchr), \
	macro(memchr_inv), \
	macro(kmemdup), \
	macro(strcpy), \
	macro(UNKNOWN),

#define MAKE_FORTIFY_FUNC(func) FORTIFY_FUNC_##func

enum fortify_func {
	EACH_FORTIFY_FUNC(MAKE_FORTIFY_FUNC)
};
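
/*
 * Worked example (illustrative only, not used by the implementation): the
 * "reason" byte handed to __fortify_panic() packs the function id into
 * bits 7:1 and the access direction into bit 0. With the enum above,
 * FORTIFY_FUNC_memcpy == 8, so:
 *
 *	const u8 reason = FORTIFY_REASON(FORTIFY_FUNC_memcpy, FORTIFY_WRITE);
 *
 * yields reason == (8 << 1) | 1 == 0x11, and the helpers recover both
 * halves: FORTIFY_REASON_FUNC(reason) == FORTIFY_FUNC_memcpy and
 * FORTIFY_REASON_DIR(reason) == FORTIFY_WRITE.
 */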

void __fortify_report(const u8 reason, const size_t avail, const size_t size);
void __fortify_panic(const u8 reason, const size_t avail, const size_t size) __cold __noreturn;
void __read_overflow(void) __compiletime_error("detected read beyond size of object (1st parameter)");
void __read_overflow2(void) __compiletime_error("detected read beyond size of object (2nd parameter)");
void __read_overflow2_field(size_t avail, size_t wanted) __compiletime_warning("detected read beyond size of field (2nd parameter); maybe use struct_group()?");
void __write_overflow(void) __compiletime_error("detected write beyond size of object (1st parameter)");
void __write_overflow_field(size_t avail, size_t wanted) __compiletime_warning("detected write beyond size of field (1st parameter); maybe use struct_group()?");

#define __compiletime_strlen(p) \
({ \
	char *__p = (char *)(p); \
	size_t __ret = SIZE_MAX; \
	const size_t __p_size = __member_size(p); \
	if (__p_size != SIZE_MAX && \
	    __builtin_constant_p(*__p)) { \
		size_t __p_len = __p_size - 1; \
		if (__builtin_constant_p(__p[__p_len]) && \
		    __p[__p_len] == '\0') \
			__ret = __builtin_strlen(__p); \
	} \
	__ret; \
})
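
/*
 * Illustration (not part of the kernel API, names are hypothetical):
 * __compiletime_strlen() only resolves when both the buffer size and its
 * terminating NUL are visible to the compiler; otherwise it evaluates to
 * SIZE_MAX as a "don't know" marker. For example, with
 *
 *	static const char greeting[8] = "hi";
 *
 * __compiletime_strlen(greeting) may fold to 2, while applying it to a
 * plain runtime "char *" argument yields SIZE_MAX. Whether a given case
 * folds depends on the compiler's constant propagation.
 */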

#if defined(__SANITIZE_ADDRESS__)

#if !defined(CONFIG_CC_HAS_KASAN_MEMINTRINSIC_PREFIX) && !defined(CONFIG_GENERIC_ENTRY)
extern void *__underlying_memset(void *p, int c, __kernel_size_t size) __RENAME(memset);
extern void *__underlying_memmove(void *p, const void *q, __kernel_size_t size) __RENAME(memmove);
extern void *__underlying_memcpy(void *p, const void *q, __kernel_size_t size) __RENAME(memcpy);
#elif defined(CONFIG_KASAN_GENERIC)
extern void *__underlying_memset(void *p, int c, __kernel_size_t size) __RENAME(__asan_memset);
extern void *__underlying_memmove(void *p, const void *q, __kernel_size_t size) __RENAME(__asan_memmove);
extern void *__underlying_memcpy(void *p, const void *q, __kernel_size_t size) __RENAME(__asan_memcpy);
#else /* CONFIG_KASAN_SW_TAGS */
extern void *__underlying_memset(void *p, int c, __kernel_size_t size) __RENAME(__hwasan_memset);
extern void *__underlying_memmove(void *p, const void *q, __kernel_size_t size) __RENAME(__hwasan_memmove);
extern void *__underlying_memcpy(void *p, const void *q, __kernel_size_t size) __RENAME(__hwasan_memcpy);
#endif

extern void *__underlying_memchr(const void *p, int c, __kernel_size_t size) __RENAME(memchr);
extern int __underlying_memcmp(const void *p, const void *q, __kernel_size_t size) __RENAME(memcmp);
extern char *__underlying_strcat(char *p, const char *q) __RENAME(strcat);
extern char *__underlying_strcpy(char *p, const char *q) __RENAME(strcpy);
extern __kernel_size_t __underlying_strlen(const char *p) __RENAME(strlen);
extern char *__underlying_strncat(char *p, const char *q, __kernel_size_t count) __RENAME(strncat);
extern char *__underlying_strncpy(char *p, const char *q, __kernel_size_t size) __RENAME(strncpy);

#else

#if defined(__SANITIZE_MEMORY__)
/*
 * For KMSAN builds all memcpy/memset/memmove calls should be replaced by the
 * corresponding __msan_XXX functions.
 */
#include <linux/kmsan_string.h>
#define __underlying_memcpy __msan_memcpy
#define __underlying_memmove __msan_memmove
#define __underlying_memset __msan_memset
#else
#define __underlying_memcpy __builtin_memcpy
#define __underlying_memmove __builtin_memmove
#define __underlying_memset __builtin_memset
#endif

#define __underlying_memchr __builtin_memchr
#define __underlying_memcmp __builtin_memcmp
#define __underlying_strcat __builtin_strcat
#define __underlying_strcpy __builtin_strcpy
#define __underlying_strlen __builtin_strlen
#define __underlying_strncat __builtin_strncat
#define __underlying_strncpy __builtin_strncpy

#endif

/**
 * unsafe_memcpy - memcpy implementation with no FORTIFY bounds checking
 *
 * @dst: Destination memory address to write to
 * @src: Source memory address to read from
 * @bytes: How many bytes to write to @dst from @src
 * @justification: Free-form text or comment describing why the use is needed
 *
 * This should be used for corner cases where the compiler cannot do the
 * right thing, or during transitions between APIs, etc. It should be used
 * very rarely, and includes a place for justification detailing where bounds
 * checking has happened, and why existing solutions cannot be employed.
 */
#define unsafe_memcpy(dst, src, bytes, justification) \
	__underlying_memcpy(dst, src, bytes)
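
/*
 * Usage sketch (hypothetical call site): the @justification argument is
 * dropped by the macro and never evaluated, so call sites conventionally
 * pass a short C comment or string documenting which earlier check bounds
 * @bytes, for example:
 *
 *	unsafe_memcpy(dst->buf, src, len,
 *		      "len validated against dst->buf_len above");
 *
 * Either form works, since the argument is discarded during expansion.
 */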

/*
 * Clang's use of __builtin_*object_size() within inlines needs hinting via
 * __pass_*object_size(). The preference is to only ever use type 1 (member
 * size, rather than struct size), but there remain some stragglers using
 * type 0 that will be converted in the future.
 */
#if __has_builtin(__builtin_dynamic_object_size)
#define POS __pass_dynamic_object_size(1)
#define POS0 __pass_dynamic_object_size(0)
#else
#define POS __pass_object_size(1)
#define POS0 __pass_object_size(0)
#endif

#define __compiletime_lessthan(bounds, length) ( \
	__builtin_constant_p((bounds) < (length)) && \
	(bounds) < (length) \
)
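
/*
 * Behavior sketch (illustrative, hypothetical variables): the helper is only
 * true when the comparison itself is a compile-time constant, so it can gate
 * __compiletime_error() helpers without ever firing on runtime-only values.
 * Given
 *
 *	char buf[8];
 *
 * __compiletime_lessthan(sizeof(buf), 16) evaluates to 1 and
 * __compiletime_lessthan(sizeof(buf), 4) evaluates to 0, while
 * __compiletime_lessthan(sizeof(buf), runtime_len) typically evaluates to 0
 * because the comparison is not a compile-time constant, even if
 * runtime_len later turns out to be larger.
 */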

/**
 * strncpy - Copy a string to memory with non-guaranteed NUL padding
 *
 * @p: pointer to destination of copy
 * @q: pointer to NUL-terminated source string to copy
 * @size: bytes to write at @p
 *
 * If strlen(@q) >= @size, the copy of @q will stop after @size bytes,
 * and @p will NOT be NUL-terminated.
 *
 * If strlen(@q) < @size, following the copy of @q, trailing NUL bytes
 * will be written to @p until @size total bytes have been written.
 *
 * Do not use this function. While FORTIFY_SOURCE tries to avoid
 * over-reads of @q, it cannot defend against writing unterminated
 * results to @p. Using strncpy() remains ambiguous and fragile.
 * Instead, please choose an alternative, so that the expectation
 * of @p's contents is unambiguous:
 *
 * +--------------------+--------------------+------------+
 * | **p** needs to be: | padded to **size** | not padded |
 * +====================+====================+============+
 * | NUL-terminated     | strscpy_pad()      | strscpy()  |
 * +--------------------+--------------------+------------+
 * | not NUL-terminated | strtomem_pad()     | strtomem() |
 * +--------------------+--------------------+------------+
 *
 * Note strscpy*()'s differing return values for detecting truncation,
 * and strtomem*()'s expectation that the destination is marked with
 * __nonstring when it is a character array.
 *
 */
__FORTIFY_INLINE __diagnose_as(__builtin_strncpy, 1, 2, 3)
char *strncpy(char * const POS p, const char *q, __kernel_size_t size)
{
	const size_t p_size = __member_size(p);

	if (__compiletime_lessthan(p_size, size))
		__write_overflow();
	if (p_size < size)
		fortify_panic(FORTIFY_FUNC_strncpy, FORTIFY_WRITE, p_size, size, p);
	return __underlying_strncpy(p, q, size);
}
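
/*
 * Replacement sketch (hypothetical struct, following the table above): for a
 * buffer that must stay NUL-terminated, prefer strscpy()/strscpy_pad(); for
 * a fixed-width byte field that needs no terminator, prefer strtomem_pad()
 * and mark the member __nonstring.
 *
 *	struct example {
 *		char name[16];
 *		char tag[4] __nonstring;
 *	};
 *
 *	strscpy_pad(ex->name, src, sizeof(ex->name));
 *	strtomem_pad(ex->tag, src, 0);
 */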

extern __kernel_size_t __real_strnlen(const char *, __kernel_size_t) __RENAME(strnlen);
/**
 * strnlen - Return bounded count of characters in a NUL-terminated string
 *
 * @p: pointer to NUL-terminated string to count.
 * @maxlen: maximum number of characters to count.
 *
 * Returns number of characters in @p (NOT including the final NUL), or
 * @maxlen if no NUL terminator is found within the first @maxlen characters.
 *
 */
__FORTIFY_INLINE __kernel_size_t strnlen(const char * const POS p, __kernel_size_t maxlen)
{
	const size_t p_size = __member_size(p);
	const size_t p_len = __compiletime_strlen(p);
	size_t ret;

	/* We can take compile-time actions when maxlen is const. */
	if (__builtin_constant_p(maxlen) && p_len != SIZE_MAX) {
		/* If p is const, we can use its compile-time-known len. */
		if (maxlen >= p_size)
			return p_len;
	}

	/* Do not check characters beyond the end of p. */
	ret = __real_strnlen(p, maxlen < p_size ? maxlen : p_size);
	if (p_size <= ret && maxlen != ret)
		fortify_panic(FORTIFY_FUNC_strnlen, FORTIFY_READ, p_size, ret + 1, ret);
	return ret;
}
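
/*
 * Usage sketch (hypothetical buffer): bounding the scan by the destination's
 * size keeps a missing terminator from walking off the end of the object.
 *
 *	char label[32];
 *	size_t n = strnlen(label, sizeof(label));
 *
 * Here n is at most 31 when label is properly terminated, or 32 when no NUL
 * was found within the array.
 */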

/*
 * Defined after fortified strnlen to reuse it. However, it must still be
 * possible for strlen() to be used on compile-time strings for use in
 * static initializers (i.e. as a constant expression).
 */
/**
 * strlen - Return count of characters in a NUL-terminated string
 *
 * @p: pointer to NUL-terminated string to count.
 *
 * Do not use this function unless the string length is known at
 * compile-time. When @p is unterminated, this function may crash
 * or return unexpected counts that could lead to memory content
 * exposures. Prefer strnlen().
 *
 * Returns number of characters in @p (NOT including the final NUL).
 *
 */
#define strlen(p) \
	__builtin_choose_expr(__is_constexpr(__builtin_strlen(p)), \
		__builtin_strlen(p), __fortify_strlen(p))
__FORTIFY_INLINE __diagnose_as(__builtin_strlen, 1)
__kernel_size_t __fortify_strlen(const char * const POS p)
{
	const size_t p_size = __member_size(p);
	__kernel_size_t ret;

	/* Give up if we don't know how large p is. */
	if (p_size == SIZE_MAX)
		return __underlying_strlen(p);
	ret = strnlen(p, p_size);
	if (p_size <= ret)
		fortify_panic(FORTIFY_FUNC_strlen, FORTIFY_READ, p_size, ret + 1, ret);
	return ret;
}
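
/*
 * Illustration (hypothetical declaration): because the strlen() macro falls
 * back to __builtin_strlen() when the result is a constant expression, it
 * remains usable in static initializers, while runtime calls go through the
 * fortified path.
 *
 *	static const size_t banner_len = strlen("fortify");
 *
 * banner_len is folded to 7 at compile time; strlen() on a runtime pointer
 * is instead routed to __fortify_strlen() and bounds-checked.
 */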

/* Defined after fortified strnlen() to reuse it. */
extern ssize_t __real_strscpy(char *, const char *, size_t) __RENAME(sized_strscpy);
__FORTIFY_INLINE ssize_t sized_strscpy(char * const POS p, const char * const POS q, size_t size)
{
	/* Use string size rather than possible enclosing struct size. */
	const size_t p_size = __member_size(p);
	const size_t q_size = __member_size(q);
	size_t len;

	/* If we cannot get size of p and q default to call strscpy. */
	if (p_size == SIZE_MAX && q_size == SIZE_MAX)
		return __real_strscpy(p, q, size);

	/*
	 * If size can be known at compile time and is greater than
	 * p_size, generate a compile time write overflow error.
	 */
	if (__compiletime_lessthan(p_size, size))
		__write_overflow();

	/* Short-circuit for compile-time known-safe lengths. */
	if (__compiletime_lessthan(p_size, SIZE_MAX)) {
		len = __compiletime_strlen(q);

		if (len < SIZE_MAX && __compiletime_lessthan(len, size)) {
			__underlying_memcpy(p, q, len + 1);
			return len;
		}
	}

	/*
	 * This call protects from read overflow, because len will default to
	 * q's length if it is smaller than size.
	 */
	len = strnlen(q, size);
	/*
	 * If len equals size, we will copy only size bytes which leads to
	 * -E2BIG being returned.
	 * Otherwise we will copy len + 1 because of the final '\0'.
	 */
	len = len == size ? size : len + 1;

	/*
	 * Generate a runtime write overflow error if len is greater than
	 * p_size.
	 */
	if (p_size < len)
		fortify_panic(FORTIFY_FUNC_strscpy, FORTIFY_WRITE, p_size, len, -E2BIG);

	/*
	 * We can now safely call vanilla strscpy because we are protected from:
	 * 1. Read overflow thanks to call to strnlen().
	 * 2. Write overflow thanks to above ifs.
	 */
	return __real_strscpy(p, q, len);
}
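
/*
 * Caller-side sketch (hypothetical buffer; the public strscpy() wrapper that
 * eventually routes here is defined elsewhere): strscpy() returns the number
 * of characters copied, or -E2BIG when the source had to be truncated to
 * fit, which makes truncation explicit at the call site.
 *
 *	char name[16];
 *	ssize_t copied = strscpy(name, src, sizeof(name));
 *
 *	if (copied == -E2BIG)
 *		pr_warn("name truncated\n");
 */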

/* Defined after fortified strlen() to reuse it. */
extern size_t __real_strlcat(char *p, const char *q, size_t avail) __RENAME(strlcat);
/**
 * strlcat - Append a string to an existing string
 *
 * @p: pointer to %NUL-terminated string to append to
 * @q: pointer to %NUL-terminated string to append from
 * @avail: Maximum bytes available in @p
 *
 * Appends %NUL-terminated string @q after the %NUL-terminated
 * string at @p, but will not write beyond @avail bytes total,
 * potentially truncating the copy from @q. @p will stay
 * %NUL-terminated only if a %NUL already existed within
 * the @avail bytes of @p. If so, the resulting number of
 * bytes copied from @q will be at most "@avail - strlen(@p) - 1".
 *
 * Do not use this function. While FORTIFY_SOURCE tries to avoid
 * read and write overflows, this is only possible when the sizes
 * of @p and @q are known to the compiler. Prefer building the
 * string with formatting, via scnprintf(), seq_buf, or similar.
 *
 * Returns total bytes that _would_ have been contained by @p
 * regardless of truncation, similar to snprintf(). If return
 * value is >= @avail, the string has been truncated.
 *
 */
__FORTIFY_INLINE
size_t strlcat(char * const POS p, const char * const POS q, size_t avail)
{
	const size_t p_size = __member_size(p);
	const size_t q_size = __member_size(q);
	size_t p_len, copy_len;
	size_t actual, wanted;

	/* Give up immediately if both buffer sizes are unknown. */
	if (p_size == SIZE_MAX && q_size == SIZE_MAX)
		return __real_strlcat(p, q, avail);

	p_len = strnlen(p, avail);
	copy_len = strlen(q);
	wanted = actual = p_len + copy_len;

	/* Cannot append any more: report truncation. */
	if (avail <= p_len)
		return wanted;

	/* Give up if string is already overflowed. */
	if (p_size <= p_len)
		fortify_panic(FORTIFY_FUNC_strlcat, FORTIFY_READ, p_size, p_len + 1, wanted);

	if (actual >= avail) {
		copy_len = avail - p_len - 1;
		actual = p_len + copy_len;
	}

	/* Give up if copy will overflow. */
	if (p_size <= actual)
		fortify_panic(FORTIFY_FUNC_strlcat, FORTIFY_WRITE, p_size, actual + 1, wanted);
	__underlying_memcpy(p + p_len, q, copy_len);
	p[actual] = '\0';

	return wanted;
}
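
/*
 * Caller-side sketch (hypothetical buffer): per the return-value contract
 * above, truncation is detected by comparing the result against the
 * available space.
 *
 *	char path[64] = "/sys/";
 *
 *	if (strlcat(path, node_name, sizeof(path)) >= sizeof(path))
 *		pr_warn("path truncated\n");
 *
 * As the kernel-doc notes, new code should usually prefer scnprintf() or
 * seq_buf instead of building strings with strlcat().
 */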

/* Defined after fortified strlcat() to reuse it. */
/**
 * strcat - Append a string to an existing string
 *
 * @p: pointer to NUL-terminated string to append to
 * @q: pointer to NUL-terminated source string to append from
 *
 * Do not use this function. While FORTIFY_SOURCE tries to avoid
 * read and write overflows, this is only possible when the
 * destination buffer size is known to the compiler. Prefer
 * building the string with formatting, via scnprintf() or similar.
 * At the very least, use strncat().
 *
 * Returns @p.
 *
 */
__FORTIFY_INLINE __diagnose_as(__builtin_strcat, 1, 2)
char *strcat(char * const POS p, const char *q)
{
	const size_t p_size = __member_size(p);
	const size_t wanted = strlcat(p, q, p_size);

	if (p_size <= wanted)
		fortify_panic(FORTIFY_FUNC_strcat, FORTIFY_WRITE, p_size, wanted + 1, p);
	return p;
}

/**
 * strncat - Append a string to an existing string
 *
 * @p: pointer to NUL-terminated string to append to
 * @q: pointer to source string to append from
 * @count: Maximum bytes to read from @q
 *
 * Appends at most @count bytes from @q (stopping at the first
 * NUL byte) after the NUL-terminated string at @p. @p will be
 * NUL-terminated.
 *
 * Do not use this function. While FORTIFY_SOURCE tries to avoid
 * read and write overflows, this is only possible when the sizes
 * of @p and @q are known to the compiler. Prefer building the
 * string with formatting, via scnprintf() or similar.
 *
 * Returns @p.
 *
 */
/* Defined after fortified strlen() and strnlen() to reuse them. */
__FORTIFY_INLINE __diagnose_as(__builtin_strncat, 1, 2, 3)
char *strncat(char * const POS p, const char * const POS q, __kernel_size_t count)
{
	const size_t p_size = __member_size(p);
	const size_t q_size = __member_size(q);
	size_t p_len, copy_len, total;

	if (p_size == SIZE_MAX && q_size == SIZE_MAX)
		return __underlying_strncat(p, q, count);
	p_len = strlen(p);
	copy_len = strnlen(q, count);
	total = p_len + copy_len + 1;
	if (p_size < total)
		fortify_panic(FORTIFY_FUNC_strncat, FORTIFY_WRITE, p_size, total, p);
	__underlying_memcpy(p + p_len, q, copy_len);
	p[p_len + copy_len] = '\0';
	return p;
}

__FORTIFY_INLINE bool fortify_memset_chk(__kernel_size_t size,
					 const size_t p_size,
					 const size_t p_size_field)
{
	if (__builtin_constant_p(size)) {
		/*
		 * Length argument is a constant expression, so we
		 * can perform compile-time bounds checking where
		 * buffer sizes are also known at compile time.
		 */

		/* Error when size is larger than enclosing struct. */
		if (__compiletime_lessthan(p_size_field, p_size) &&
		    __compiletime_lessthan(p_size, size))
			__write_overflow();

		/* Warn when write size is larger than dest field. */
		if (__compiletime_lessthan(p_size_field, size))
			__write_overflow_field(p_size_field, size);
	}
	/*
	 * At this point, length argument may not be a constant expression,
	 * so run-time bounds checking can be done where buffer sizes are
	 * known. (This is not an "else" because the above checks may only
	 * be compile-time warnings, and we want to still warn for run-time
	 * overflows.)
	 */

	/*
	 * Always stop accesses beyond the struct that contains the
	 * field, when the buffer's remaining size is known.
	 * (The SIZE_MAX test is to optimize away checks where the buffer
	 * lengths are unknown.)
	 */
	if (p_size != SIZE_MAX && p_size < size)
		fortify_panic(FORTIFY_FUNC_memset, FORTIFY_WRITE, p_size, size, true);
	return false;
}

#define __fortify_memset_chk(p, c, size, p_size, p_size_field) ({ \
	size_t __fortify_size = (size_t)(size); \
	fortify_memset_chk(__fortify_size, p_size, p_size_field), \
	__underlying_memset(p, c, __fortify_size); \
})

/*
 * __struct_size() vs __member_size() must be captured here to avoid
 * evaluating argument side-effects further into the macro layers.
 */
#ifndef CONFIG_KMSAN
#define memset(p, c, s) __fortify_memset_chk(p, c, s, \
		__struct_size(p), __member_size(p))
#endif
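
/*
 * Illustration (hypothetical struct and pointer "b"): the memset() wrapper
 * captures both the member size and the enclosing-object size, so a
 * constant-size write that spans past a member draws the compile-time
 * "write beyond size of field" warning, while a write bounded by the
 * member stays quiet:
 *
 *	struct blob {
 *		u8 header[8];
 *		u8 body[24];
 *	};
 *
 *	memset(b->header, 0, sizeof(b->header));	within the field: OK
 *	memset(b->header, 0, sizeof(struct blob));	spans into "body": warns
 *
 * Field-spanning clears like the second call should use struct_group()
 * (see the comment before fortify_memcpy_chk() below).
 */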

/*
 * To make sure the compiler can enforce protection against buffer overflows,
 * memcpy(), memmove(), and memset() must not be used beyond individual
 * struct members. If you need to copy across multiple members, please use
 * struct_group() to create a named mirror of an anonymous struct union.
 * (e.g. see struct sk_buff.) Read overflow checking is currently only
 * done when a write overflow is also present, or when building with W=1.
 *
 * Mitigation coverage matrix
 *                                    Bounds checking at:
 *                                    +-------+-------+-------+-------+
 *                                    | Compile time  |   Run time    |
 * memcpy() argument sizes:           | write | read  | write | read  |
 *  dest     source    length         +-------+-------+-------+-------+
 * memcpy(known,   known,   constant) |   y   |   y   |  n/a  |  n/a  |
 * memcpy(known,   unknown, constant) |   y   |   n   |  n/a  |   V   |
 * memcpy(known,   known,   dynamic)  |   n   |   n   |   B   |   B   |
 * memcpy(known,   unknown, dynamic)  |   n   |   n   |   B   |   V   |
 * memcpy(unknown, known,   constant) |   n   |   y   |   V   |  n/a  |
 * memcpy(unknown, unknown, constant) |   n   |   n   |   V   |   V   |
 * memcpy(unknown, known,   dynamic)  |   n   |   n   |   V   |   B   |
 * memcpy(unknown, unknown, dynamic)  |   n   |   n   |   V   |   V   |
 *                                    +-------+-------+-------+-------+
 *
 * y = perform deterministic compile-time bounds checking
 * n = cannot perform deterministic compile-time bounds checking
 * n/a = no run-time bounds checking needed since compile-time deterministic
 * B = can perform run-time bounds checking (currently unimplemented)
 * V = vulnerable to run-time overflow (will need refactoring to solve)
 *
 */
__FORTIFY_INLINE bool fortify_memcpy_chk(__kernel_size_t size,
					 const size_t p_size,
					 const size_t q_size,
					 const size_t p_size_field,
					 const size_t q_size_field,
					 const u8 func)
{
	if (__builtin_constant_p(size)) {
		/*
		 * Length argument is a constant expression, so we
		 * can perform compile-time bounds checking where
		 * buffer sizes are also known at compile time.
		 */

		/* Error when size is larger than enclosing struct. */
		if (__compiletime_lessthan(p_size_field, p_size) &&
		    __compiletime_lessthan(p_size, size))
			__write_overflow();
		if (__compiletime_lessthan(q_size_field, q_size) &&
		    __compiletime_lessthan(q_size, size))
			__read_overflow2();

		/* Warn when write size argument larger than dest field. */
		if (__compiletime_lessthan(p_size_field, size))
			__write_overflow_field(p_size_field, size);
		/*
		 * Warn for source field over-read when building with W=1
		 * or when an over-write happened, so both can be fixed at
		 * the same time.
		 */
		if ((IS_ENABLED(KBUILD_EXTRA_WARN1) ||
		     __compiletime_lessthan(p_size_field, size)) &&
		    __compiletime_lessthan(q_size_field, size))
			__read_overflow2_field(q_size_field, size);
	}
	/*
	 * At this point, length argument may not be a constant expression,
	 * so run-time bounds checking can be done where buffer sizes are
	 * known. (This is not an "else" because the above checks may only
	 * be compile-time warnings, and we want to still warn for run-time
	 * overflows.)
	 */

	/*
	 * Always stop accesses beyond the struct that contains the
	 * field, when the buffer's remaining size is known.
	 * (The SIZE_MAX test is to optimize away checks where the buffer
	 * lengths are unknown.)
	 */
	if (p_size != SIZE_MAX && p_size < size)
		fortify_panic(func, FORTIFY_WRITE, p_size, size, true);
	else if (q_size != SIZE_MAX && q_size < size)
		fortify_panic(func, FORTIFY_READ, p_size, size, true);

	/*
	 * Warn when writing beyond destination field size.
	 *
	 * Note the implementation of __builtin_*object_size() behaves
	 * like sizeof() when not directly referencing a flexible
	 * array member, which means there will be many bounds checks
	 * that will appear at run-time, without a way for them to be
	 * detected at compile-time (as can be done when the destination
	 * is specifically the flexible array member).
	 * https://gcc.gnu.org/bugzilla/show_bug.cgi?id=101832
	 */
	if (p_size_field != SIZE_MAX &&
	    p_size != p_size_field && p_size_field < size)
		return true;

	return false;
}

/*
 * To work around what seems to be an optimizer bug, the macro arguments
 * need to have const copies or the values end up changed by the time they
 * reach fortify_warn_once(). See commit 6f7630b1b5bc ("fortify: Capture
 * __bos() results in const temp vars") for more details.
 */
#define __fortify_memcpy_chk(p, q, size, p_size, q_size, \
			     p_size_field, q_size_field, op) ({ \
	const size_t __fortify_size = (size_t)(size); \
	const size_t __p_size = (p_size); \
	const size_t __q_size = (q_size); \
	const size_t __p_size_field = (p_size_field); \
	const size_t __q_size_field = (q_size_field); \
	/* Keep a mutable version of the size for the final copy. */ \
	size_t __copy_size = __fortify_size; \
	fortify_warn_once(fortify_memcpy_chk(__fortify_size, __p_size, \
					     __q_size, __p_size_field, \
					     __q_size_field, FORTIFY_FUNC_ ##op), \
			  #op ": detected field-spanning write (size %zu) of single %s (size %zu)\n", \
			  __fortify_size, \
			  "field \"" #p "\" at " FILE_LINE, \
			  __p_size_field); \
	/* Hide only the run-time size from value range tracking to */ \
	/* silence compile-time false positive bounds warnings. */ \
	if (!__builtin_constant_p(__copy_size)) \
		OPTIMIZER_HIDE_VAR(__copy_size); \
	__underlying_##op(p, q, __copy_size); \
})

/*
 * Notes about compile-time buffer size detection:
 *
 * With these types...
 *
 *	struct middle {
 *		u16 a;
 *		u8 middle_buf[16];
 *		int b;
 *	};
 *	struct end {
 *		u16 a;
 *		u8 end_buf[16];
 *	};
 *	struct flex {
 *		int a;
 *		u8 flex_buf[];
 *	};
 *
 *	void func(TYPE *ptr) { ... }
 *
 * Cases where destination size cannot be currently detected:
 * - the size of ptr's object (seemingly by design, gcc & clang fail):
 *	__builtin_object_size(ptr, 1) == SIZE_MAX
 * - the size of flexible arrays in ptr's obj (by design, dynamic size):
 *	__builtin_object_size(ptr->flex_buf, 1) == SIZE_MAX
 * - the size of ANY array at the end of ptr's obj (gcc and clang bug):
 *	__builtin_object_size(ptr->end_buf, 1) == SIZE_MAX
 *	https://gcc.gnu.org/bugzilla/show_bug.cgi?id=101836
 *
 * Cases where destination size is currently detected:
 * - the size of non-array members within ptr's object:
 *	__builtin_object_size(ptr->a, 1) == 2
 * - the size of non-flexible-array in the middle of ptr's obj:
 *	__builtin_object_size(ptr->middle_buf, 1) == 16
 *
 */
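
/*
 * Sketch of the struct_group() approach recommended above (hypothetical
 * struct): grouping the members that are legitimately written together
 * gives the compiler a single named region to size, so the copy is bounded
 * without triggering the field-spanning warning.
 *
 *	struct packet {
 *		u8 type;
 *		struct_group(header,
 *			__be16 src;
 *			__be16 dst;
 *		);
 *		u8 payload[];
 *	};
 *
 *	memcpy(&pkt->header, &tmpl->header, sizeof(pkt->header));
 */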

/*
 * __struct_size() vs __member_size() must be captured here to avoid
 * evaluating argument side-effects further into the macro layers.
 */
#define memcpy(p, q, s) __fortify_memcpy_chk(p, q, s, \
		__struct_size(p), __struct_size(q), \
		__member_size(p), __member_size(q), \
		memcpy)
#define memmove(p, q, s) __fortify_memcpy_chk(p, q, s, \
		__struct_size(p), __struct_size(q), \
		__member_size(p), __member_size(q), \
		memmove)

extern void *__real_memscan(void *, int, __kernel_size_t) __RENAME(memscan);
__FORTIFY_INLINE void *memscan(void * const POS0 p, int c, __kernel_size_t size)
{
	const size_t p_size = __struct_size(p);

	if (__compiletime_lessthan(p_size, size))
		__read_overflow();
	if (p_size < size)
		fortify_panic(FORTIFY_FUNC_memscan, FORTIFY_READ, p_size, size, NULL);
	return __real_memscan(p, c, size);
}

__FORTIFY_INLINE __diagnose_as(__builtin_memcmp, 1, 2, 3)
int memcmp(const void * const POS0 p, const void * const POS0 q, __kernel_size_t size)
{
	const size_t p_size = __struct_size(p);
	const size_t q_size = __struct_size(q);

	if (__builtin_constant_p(size)) {
		if (__compiletime_lessthan(p_size, size))
			__read_overflow();
		if (__compiletime_lessthan(q_size, size))
			__read_overflow2();
	}
	if (p_size < size)
		fortify_panic(FORTIFY_FUNC_memcmp, FORTIFY_READ, p_size, size, INT_MIN);
	else if (q_size < size)
		fortify_panic(FORTIFY_FUNC_memcmp, FORTIFY_READ, q_size, size, INT_MIN);
	return __underlying_memcmp(p, q, size);
}

__FORTIFY_INLINE __diagnose_as(__builtin_memchr, 1, 2, 3)
void *memchr(const void * const POS0 p, int c, __kernel_size_t size)
{
	const size_t p_size = __struct_size(p);

	if (__compiletime_lessthan(p_size, size))
		__read_overflow();
	if (p_size < size)
		fortify_panic(FORTIFY_FUNC_memchr, FORTIFY_READ, p_size, size, NULL);
	return __underlying_memchr(p, c, size);
}

void *__real_memchr_inv(const void *s, int c, size_t n) __RENAME(memchr_inv);
__FORTIFY_INLINE void *memchr_inv(const void * const POS0 p, int c, size_t size)
{
	const size_t p_size = __struct_size(p);

	if (__compiletime_lessthan(p_size, size))
		__read_overflow();
	if (p_size < size)
		fortify_panic(FORTIFY_FUNC_memchr_inv, FORTIFY_READ, p_size, size, NULL);
	return __real_memchr_inv(p, c, size);
}

extern void *__real_kmemdup(const void *src, size_t len, gfp_t gfp) __RENAME(kmemdup_noprof)
	__realloc_size(2);
__FORTIFY_INLINE void *kmemdup_noprof(const void * const POS0 p, size_t size, gfp_t gfp)
{
	const size_t p_size = __struct_size(p);

	if (__compiletime_lessthan(p_size, size))
		__read_overflow();
	if (p_size < size)
		fortify_panic(FORTIFY_FUNC_kmemdup, FORTIFY_READ, p_size, size,
			      __real_kmemdup(p, 0, gfp));
	return __real_kmemdup(p, size, gfp);
}
#define kmemdup(...) alloc_hooks(kmemdup_noprof(__VA_ARGS__))
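
/*
 * Usage sketch (hypothetical source buffer): the fortified kmemdup() checks
 * the requested length against the source object before duplicating it, and
 * callers still need the usual allocation-failure check.
 *
 *	u8 *copy = kmemdup(fw->data, fw->size, GFP_KERNEL);
 *
 *	if (!copy)
 *		return -ENOMEM;
 */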

/**
 * strcpy - Copy a string into another string buffer
 *
 * @p: pointer to destination of copy
 * @q: pointer to NUL-terminated source string to copy
 *
 * Do not use this function. While FORTIFY_SOURCE tries to avoid
 * overflows, this is only possible when the sizes of @q and @p are
 * known to the compiler. Prefer strscpy(), though note its different
 * return values for detecting truncation.
 *
 * Returns @p.
 *
 */
/* Defined after fortified strlen to reuse it. */
__FORTIFY_INLINE __diagnose_as(__builtin_strcpy, 1, 2)
char *strcpy(char * const POS p, const char * const POS q)
{
	const size_t p_size = __member_size(p);
	const size_t q_size = __member_size(q);
	size_t size;

	/* If neither buffer size is known, immediately give up. */
	if (__builtin_constant_p(p_size) &&
	    __builtin_constant_p(q_size) &&
	    p_size == SIZE_MAX && q_size == SIZE_MAX)
		return __underlying_strcpy(p, q);
	size = strlen(q) + 1;
	/* Compile-time check for const size overflow. */
	if (__compiletime_lessthan(p_size, size))
		__write_overflow();
	/* Run-time check for dynamic size overflow. */
	if (p_size < size)
		fortify_panic(FORTIFY_FUNC_strcpy, FORTIFY_WRITE, p_size, size, p);
	__underlying_memcpy(p, q, size);
	return p;
}

/* Don't use these outside the FORTIFY_SOURCE implementation */
#undef __underlying_memchr
#undef __underlying_memcmp
#undef __underlying_strcat
#undef __underlying_strcpy
#undef __underlying_strlen
#undef __underlying_strncat
#undef __underlying_strncpy

#undef POS
#undef POS0

#endif /* _LINUX_FORTIFY_STRING_H_ */