Loading...
1/*
2 * User address space access functions.
3 * The non inlined parts of asm-i386/uaccess.h are here.
4 *
5 * Copyright 1997 Andi Kleen <ak@muc.de>
6 * Copyright 1997 Linus Torvalds
7 */
8#include <linux/mm.h>
9#include <linux/highmem.h>
10#include <linux/blkdev.h>
11#include <linux/module.h>
12#include <linux/backing-dev.h>
13#include <linux/interrupt.h>
14#include <asm/uaccess.h>
15#include <asm/mmx.h>
16#include <asm/asm.h>
17
18#ifdef CONFIG_X86_INTEL_USERCOPY
19/*
20 * Alignment at which movsl is preferred for bulk memory copies.
21 */
22struct movsl_mask movsl_mask __read_mostly;
23#endif
24
25static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
26{
27#ifdef CONFIG_X86_INTEL_USERCOPY
28 if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
29 return 0;
30#endif
31 return 1;
32}
33#define movsl_is_ok(a1, a2, n) \
34 __movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
35
36/*
37 * Zero Userspace
38 */
39
40#define __do_clear_user(addr,size) \
41do { \
42 int __d0; \
43 might_fault(); \
44 __asm__ __volatile__( \
45 ASM_STAC "\n" \
46 "0: rep; stosl\n" \
47 " movl %2,%0\n" \
48 "1: rep; stosb\n" \
49 "2: " ASM_CLAC "\n" \
50 ".section .fixup,\"ax\"\n" \
51 "3: lea 0(%2,%0,4),%0\n" \
52 " jmp 2b\n" \
53 ".previous\n" \
54 _ASM_EXTABLE(0b,3b) \
55 _ASM_EXTABLE(1b,2b) \
56 : "=&c"(size), "=&D" (__d0) \
57 : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
58} while (0)
59
60/**
61 * clear_user: - Zero a block of memory in user space.
62 * @to: Destination address, in user space.
63 * @n: Number of bytes to zero.
64 *
65 * Zero a block of memory in user space.
66 *
67 * Returns number of bytes that could not be cleared.
68 * On success, this will be zero.
69 */
70unsigned long
71clear_user(void __user *to, unsigned long n)
72{
73 might_fault();
74 if (access_ok(VERIFY_WRITE, to, n))
75 __do_clear_user(to, n);
76 return n;
77}
78EXPORT_SYMBOL(clear_user);
79
80/**
81 * __clear_user: - Zero a block of memory in user space, with less checking.
82 * @to: Destination address, in user space.
83 * @n: Number of bytes to zero.
84 *
85 * Zero a block of memory in user space. Caller must check
86 * the specified block with access_ok() before calling this function.
87 *
88 * Returns number of bytes that could not be cleared.
89 * On success, this will be zero.
90 */
91unsigned long
92__clear_user(void __user *to, unsigned long n)
93{
94 __do_clear_user(to, n);
95 return n;
96}
97EXPORT_SYMBOL(__clear_user);
98
99#ifdef CONFIG_X86_INTEL_USERCOPY
100static unsigned long
101__copy_user_intel(void __user *to, const void *from, unsigned long size)
102{
103 int d0, d1;
104 __asm__ __volatile__(
105 " .align 2,0x90\n"
106 "1: movl 32(%4), %%eax\n"
107 " cmpl $67, %0\n"
108 " jbe 3f\n"
109 "2: movl 64(%4), %%eax\n"
110 " .align 2,0x90\n"
111 "3: movl 0(%4), %%eax\n"
112 "4: movl 4(%4), %%edx\n"
113 "5: movl %%eax, 0(%3)\n"
114 "6: movl %%edx, 4(%3)\n"
115 "7: movl 8(%4), %%eax\n"
116 "8: movl 12(%4),%%edx\n"
117 "9: movl %%eax, 8(%3)\n"
118 "10: movl %%edx, 12(%3)\n"
119 "11: movl 16(%4), %%eax\n"
120 "12: movl 20(%4), %%edx\n"
121 "13: movl %%eax, 16(%3)\n"
122 "14: movl %%edx, 20(%3)\n"
123 "15: movl 24(%4), %%eax\n"
124 "16: movl 28(%4), %%edx\n"
125 "17: movl %%eax, 24(%3)\n"
126 "18: movl %%edx, 28(%3)\n"
127 "19: movl 32(%4), %%eax\n"
128 "20: movl 36(%4), %%edx\n"
129 "21: movl %%eax, 32(%3)\n"
130 "22: movl %%edx, 36(%3)\n"
131 "23: movl 40(%4), %%eax\n"
132 "24: movl 44(%4), %%edx\n"
133 "25: movl %%eax, 40(%3)\n"
134 "26: movl %%edx, 44(%3)\n"
135 "27: movl 48(%4), %%eax\n"
136 "28: movl 52(%4), %%edx\n"
137 "29: movl %%eax, 48(%3)\n"
138 "30: movl %%edx, 52(%3)\n"
139 "31: movl 56(%4), %%eax\n"
140 "32: movl 60(%4), %%edx\n"
141 "33: movl %%eax, 56(%3)\n"
142 "34: movl %%edx, 60(%3)\n"
143 " addl $-64, %0\n"
144 " addl $64, %4\n"
145 " addl $64, %3\n"
146 " cmpl $63, %0\n"
147 " ja 1b\n"
148 "35: movl %0, %%eax\n"
149 " shrl $2, %0\n"
150 " andl $3, %%eax\n"
151 " cld\n"
152 "99: rep; movsl\n"
153 "36: movl %%eax, %0\n"
154 "37: rep; movsb\n"
155 "100:\n"
156 ".section .fixup,\"ax\"\n"
157 "101: lea 0(%%eax,%0,4),%0\n"
158 " jmp 100b\n"
159 ".previous\n"
160 _ASM_EXTABLE(1b,100b)
161 _ASM_EXTABLE(2b,100b)
162 _ASM_EXTABLE(3b,100b)
163 _ASM_EXTABLE(4b,100b)
164 _ASM_EXTABLE(5b,100b)
165 _ASM_EXTABLE(6b,100b)
166 _ASM_EXTABLE(7b,100b)
167 _ASM_EXTABLE(8b,100b)
168 _ASM_EXTABLE(9b,100b)
169 _ASM_EXTABLE(10b,100b)
170 _ASM_EXTABLE(11b,100b)
171 _ASM_EXTABLE(12b,100b)
172 _ASM_EXTABLE(13b,100b)
173 _ASM_EXTABLE(14b,100b)
174 _ASM_EXTABLE(15b,100b)
175 _ASM_EXTABLE(16b,100b)
176 _ASM_EXTABLE(17b,100b)
177 _ASM_EXTABLE(18b,100b)
178 _ASM_EXTABLE(19b,100b)
179 _ASM_EXTABLE(20b,100b)
180 _ASM_EXTABLE(21b,100b)
181 _ASM_EXTABLE(22b,100b)
182 _ASM_EXTABLE(23b,100b)
183 _ASM_EXTABLE(24b,100b)
184 _ASM_EXTABLE(25b,100b)
185 _ASM_EXTABLE(26b,100b)
186 _ASM_EXTABLE(27b,100b)
187 _ASM_EXTABLE(28b,100b)
188 _ASM_EXTABLE(29b,100b)
189 _ASM_EXTABLE(30b,100b)
190 _ASM_EXTABLE(31b,100b)
191 _ASM_EXTABLE(32b,100b)
192 _ASM_EXTABLE(33b,100b)
193 _ASM_EXTABLE(34b,100b)
194 _ASM_EXTABLE(35b,100b)
195 _ASM_EXTABLE(36b,100b)
196 _ASM_EXTABLE(37b,100b)
197 _ASM_EXTABLE(99b,101b)
198 : "=&c"(size), "=&D" (d0), "=&S" (d1)
199 : "1"(to), "2"(from), "0"(size)
200 : "eax", "edx", "memory");
201 return size;
202}
203
204static unsigned long
205__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
206{
207 int d0, d1;
208 __asm__ __volatile__(
209 " .align 2,0x90\n"
210 "0: movl 32(%4), %%eax\n"
211 " cmpl $67, %0\n"
212 " jbe 2f\n"
213 "1: movl 64(%4), %%eax\n"
214 " .align 2,0x90\n"
215 "2: movl 0(%4), %%eax\n"
216 "21: movl 4(%4), %%edx\n"
217 " movl %%eax, 0(%3)\n"
218 " movl %%edx, 4(%3)\n"
219 "3: movl 8(%4), %%eax\n"
220 "31: movl 12(%4),%%edx\n"
221 " movl %%eax, 8(%3)\n"
222 " movl %%edx, 12(%3)\n"
223 "4: movl 16(%4), %%eax\n"
224 "41: movl 20(%4), %%edx\n"
225 " movl %%eax, 16(%3)\n"
226 " movl %%edx, 20(%3)\n"
227 "10: movl 24(%4), %%eax\n"
228 "51: movl 28(%4), %%edx\n"
229 " movl %%eax, 24(%3)\n"
230 " movl %%edx, 28(%3)\n"
231 "11: movl 32(%4), %%eax\n"
232 "61: movl 36(%4), %%edx\n"
233 " movl %%eax, 32(%3)\n"
234 " movl %%edx, 36(%3)\n"
235 "12: movl 40(%4), %%eax\n"
236 "71: movl 44(%4), %%edx\n"
237 " movl %%eax, 40(%3)\n"
238 " movl %%edx, 44(%3)\n"
239 "13: movl 48(%4), %%eax\n"
240 "81: movl 52(%4), %%edx\n"
241 " movl %%eax, 48(%3)\n"
242 " movl %%edx, 52(%3)\n"
243 "14: movl 56(%4), %%eax\n"
244 "91: movl 60(%4), %%edx\n"
245 " movl %%eax, 56(%3)\n"
246 " movl %%edx, 60(%3)\n"
247 " addl $-64, %0\n"
248 " addl $64, %4\n"
249 " addl $64, %3\n"
250 " cmpl $63, %0\n"
251 " ja 0b\n"
252 "5: movl %0, %%eax\n"
253 " shrl $2, %0\n"
254 " andl $3, %%eax\n"
255 " cld\n"
256 "6: rep; movsl\n"
257 " movl %%eax,%0\n"
258 "7: rep; movsb\n"
259 "8:\n"
260 ".section .fixup,\"ax\"\n"
261 "9: lea 0(%%eax,%0,4),%0\n"
262 "16: pushl %0\n"
263 " pushl %%eax\n"
264 " xorl %%eax,%%eax\n"
265 " rep; stosb\n"
266 " popl %%eax\n"
267 " popl %0\n"
268 " jmp 8b\n"
269 ".previous\n"
270 _ASM_EXTABLE(0b,16b)
271 _ASM_EXTABLE(1b,16b)
272 _ASM_EXTABLE(2b,16b)
273 _ASM_EXTABLE(21b,16b)
274 _ASM_EXTABLE(3b,16b)
275 _ASM_EXTABLE(31b,16b)
276 _ASM_EXTABLE(4b,16b)
277 _ASM_EXTABLE(41b,16b)
278 _ASM_EXTABLE(10b,16b)
279 _ASM_EXTABLE(51b,16b)
280 _ASM_EXTABLE(11b,16b)
281 _ASM_EXTABLE(61b,16b)
282 _ASM_EXTABLE(12b,16b)
283 _ASM_EXTABLE(71b,16b)
284 _ASM_EXTABLE(13b,16b)
285 _ASM_EXTABLE(81b,16b)
286 _ASM_EXTABLE(14b,16b)
287 _ASM_EXTABLE(91b,16b)
288 _ASM_EXTABLE(6b,9b)
289 _ASM_EXTABLE(7b,16b)
290 : "=&c"(size), "=&D" (d0), "=&S" (d1)
291 : "1"(to), "2"(from), "0"(size)
292 : "eax", "edx", "memory");
293 return size;
294}
295
296/*
297 * Non Temporal Hint version of __copy_user_zeroing_intel. It is cache aware.
298 * hyoshiok@miraclelinux.com
299 */
300
301static unsigned long __copy_user_zeroing_intel_nocache(void *to,
302 const void __user *from, unsigned long size)
303{
304 int d0, d1;
305
306 __asm__ __volatile__(
307 " .align 2,0x90\n"
308 "0: movl 32(%4), %%eax\n"
309 " cmpl $67, %0\n"
310 " jbe 2f\n"
311 "1: movl 64(%4), %%eax\n"
312 " .align 2,0x90\n"
313 "2: movl 0(%4), %%eax\n"
314 "21: movl 4(%4), %%edx\n"
315 " movnti %%eax, 0(%3)\n"
316 " movnti %%edx, 4(%3)\n"
317 "3: movl 8(%4), %%eax\n"
318 "31: movl 12(%4),%%edx\n"
319 " movnti %%eax, 8(%3)\n"
320 " movnti %%edx, 12(%3)\n"
321 "4: movl 16(%4), %%eax\n"
322 "41: movl 20(%4), %%edx\n"
323 " movnti %%eax, 16(%3)\n"
324 " movnti %%edx, 20(%3)\n"
325 "10: movl 24(%4), %%eax\n"
326 "51: movl 28(%4), %%edx\n"
327 " movnti %%eax, 24(%3)\n"
328 " movnti %%edx, 28(%3)\n"
329 "11: movl 32(%4), %%eax\n"
330 "61: movl 36(%4), %%edx\n"
331 " movnti %%eax, 32(%3)\n"
332 " movnti %%edx, 36(%3)\n"
333 "12: movl 40(%4), %%eax\n"
334 "71: movl 44(%4), %%edx\n"
335 " movnti %%eax, 40(%3)\n"
336 " movnti %%edx, 44(%3)\n"
337 "13: movl 48(%4), %%eax\n"
338 "81: movl 52(%4), %%edx\n"
339 " movnti %%eax, 48(%3)\n"
340 " movnti %%edx, 52(%3)\n"
341 "14: movl 56(%4), %%eax\n"
342 "91: movl 60(%4), %%edx\n"
343 " movnti %%eax, 56(%3)\n"
344 " movnti %%edx, 60(%3)\n"
345 " addl $-64, %0\n"
346 " addl $64, %4\n"
347 " addl $64, %3\n"
348 " cmpl $63, %0\n"
349 " ja 0b\n"
350 " sfence \n"
351 "5: movl %0, %%eax\n"
352 " shrl $2, %0\n"
353 " andl $3, %%eax\n"
354 " cld\n"
355 "6: rep; movsl\n"
356 " movl %%eax,%0\n"
357 "7: rep; movsb\n"
358 "8:\n"
359 ".section .fixup,\"ax\"\n"
360 "9: lea 0(%%eax,%0,4),%0\n"
361 "16: pushl %0\n"
362 " pushl %%eax\n"
363 " xorl %%eax,%%eax\n"
364 " rep; stosb\n"
365 " popl %%eax\n"
366 " popl %0\n"
367 " jmp 8b\n"
368 ".previous\n"
369 _ASM_EXTABLE(0b,16b)
370 _ASM_EXTABLE(1b,16b)
371 _ASM_EXTABLE(2b,16b)
372 _ASM_EXTABLE(21b,16b)
373 _ASM_EXTABLE(3b,16b)
374 _ASM_EXTABLE(31b,16b)
375 _ASM_EXTABLE(4b,16b)
376 _ASM_EXTABLE(41b,16b)
377 _ASM_EXTABLE(10b,16b)
378 _ASM_EXTABLE(51b,16b)
379 _ASM_EXTABLE(11b,16b)
380 _ASM_EXTABLE(61b,16b)
381 _ASM_EXTABLE(12b,16b)
382 _ASM_EXTABLE(71b,16b)
383 _ASM_EXTABLE(13b,16b)
384 _ASM_EXTABLE(81b,16b)
385 _ASM_EXTABLE(14b,16b)
386 _ASM_EXTABLE(91b,16b)
387 _ASM_EXTABLE(6b,9b)
388 _ASM_EXTABLE(7b,16b)
389 : "=&c"(size), "=&D" (d0), "=&S" (d1)
390 : "1"(to), "2"(from), "0"(size)
391 : "eax", "edx", "memory");
392 return size;
393}
394
395static unsigned long __copy_user_intel_nocache(void *to,
396 const void __user *from, unsigned long size)
397{
398 int d0, d1;
399
400 __asm__ __volatile__(
401 " .align 2,0x90\n"
402 "0: movl 32(%4), %%eax\n"
403 " cmpl $67, %0\n"
404 " jbe 2f\n"
405 "1: movl 64(%4), %%eax\n"
406 " .align 2,0x90\n"
407 "2: movl 0(%4), %%eax\n"
408 "21: movl 4(%4), %%edx\n"
409 " movnti %%eax, 0(%3)\n"
410 " movnti %%edx, 4(%3)\n"
411 "3: movl 8(%4), %%eax\n"
412 "31: movl 12(%4),%%edx\n"
413 " movnti %%eax, 8(%3)\n"
414 " movnti %%edx, 12(%3)\n"
415 "4: movl 16(%4), %%eax\n"
416 "41: movl 20(%4), %%edx\n"
417 " movnti %%eax, 16(%3)\n"
418 " movnti %%edx, 20(%3)\n"
419 "10: movl 24(%4), %%eax\n"
420 "51: movl 28(%4), %%edx\n"
421 " movnti %%eax, 24(%3)\n"
422 " movnti %%edx, 28(%3)\n"
423 "11: movl 32(%4), %%eax\n"
424 "61: movl 36(%4), %%edx\n"
425 " movnti %%eax, 32(%3)\n"
426 " movnti %%edx, 36(%3)\n"
427 "12: movl 40(%4), %%eax\n"
428 "71: movl 44(%4), %%edx\n"
429 " movnti %%eax, 40(%3)\n"
430 " movnti %%edx, 44(%3)\n"
431 "13: movl 48(%4), %%eax\n"
432 "81: movl 52(%4), %%edx\n"
433 " movnti %%eax, 48(%3)\n"
434 " movnti %%edx, 52(%3)\n"
435 "14: movl 56(%4), %%eax\n"
436 "91: movl 60(%4), %%edx\n"
437 " movnti %%eax, 56(%3)\n"
438 " movnti %%edx, 60(%3)\n"
439 " addl $-64, %0\n"
440 " addl $64, %4\n"
441 " addl $64, %3\n"
442 " cmpl $63, %0\n"
443 " ja 0b\n"
444 " sfence \n"
445 "5: movl %0, %%eax\n"
446 " shrl $2, %0\n"
447 " andl $3, %%eax\n"
448 " cld\n"
449 "6: rep; movsl\n"
450 " movl %%eax,%0\n"
451 "7: rep; movsb\n"
452 "8:\n"
453 ".section .fixup,\"ax\"\n"
454 "9: lea 0(%%eax,%0,4),%0\n"
455 "16: jmp 8b\n"
456 ".previous\n"
457 _ASM_EXTABLE(0b,16b)
458 _ASM_EXTABLE(1b,16b)
459 _ASM_EXTABLE(2b,16b)
460 _ASM_EXTABLE(21b,16b)
461 _ASM_EXTABLE(3b,16b)
462 _ASM_EXTABLE(31b,16b)
463 _ASM_EXTABLE(4b,16b)
464 _ASM_EXTABLE(41b,16b)
465 _ASM_EXTABLE(10b,16b)
466 _ASM_EXTABLE(51b,16b)
467 _ASM_EXTABLE(11b,16b)
468 _ASM_EXTABLE(61b,16b)
469 _ASM_EXTABLE(12b,16b)
470 _ASM_EXTABLE(71b,16b)
471 _ASM_EXTABLE(13b,16b)
472 _ASM_EXTABLE(81b,16b)
473 _ASM_EXTABLE(14b,16b)
474 _ASM_EXTABLE(91b,16b)
475 _ASM_EXTABLE(6b,9b)
476 _ASM_EXTABLE(7b,16b)
477 : "=&c"(size), "=&D" (d0), "=&S" (d1)
478 : "1"(to), "2"(from), "0"(size)
479 : "eax", "edx", "memory");
480 return size;
481}
482
483#else
484
485/*
486 * Leave these declared but undefined. They should not be any references to
487 * them
488 */
489unsigned long __copy_user_zeroing_intel(void *to, const void __user *from,
490 unsigned long size);
491unsigned long __copy_user_intel(void __user *to, const void *from,
492 unsigned long size);
493unsigned long __copy_user_zeroing_intel_nocache(void *to,
494 const void __user *from, unsigned long size);
495#endif /* CONFIG_X86_INTEL_USERCOPY */
496
497/* Generic arbitrary sized copy. */
498#define __copy_user(to, from, size) \
499do { \
500 int __d0, __d1, __d2; \
501 __asm__ __volatile__( \
502 " cmp $7,%0\n" \
503 " jbe 1f\n" \
504 " movl %1,%0\n" \
505 " negl %0\n" \
506 " andl $7,%0\n" \
507 " subl %0,%3\n" \
508 "4: rep; movsb\n" \
509 " movl %3,%0\n" \
510 " shrl $2,%0\n" \
511 " andl $3,%3\n" \
512 " .align 2,0x90\n" \
513 "0: rep; movsl\n" \
514 " movl %3,%0\n" \
515 "1: rep; movsb\n" \
516 "2:\n" \
517 ".section .fixup,\"ax\"\n" \
518 "5: addl %3,%0\n" \
519 " jmp 2b\n" \
520 "3: lea 0(%3,%0,4),%0\n" \
521 " jmp 2b\n" \
522 ".previous\n" \
523 _ASM_EXTABLE(4b,5b) \
524 _ASM_EXTABLE(0b,3b) \
525 _ASM_EXTABLE(1b,2b) \
526 : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
527 : "3"(size), "0"(size), "1"(to), "2"(from) \
528 : "memory"); \
529} while (0)
530
531#define __copy_user_zeroing(to, from, size) \
532do { \
533 int __d0, __d1, __d2; \
534 __asm__ __volatile__( \
535 " cmp $7,%0\n" \
536 " jbe 1f\n" \
537 " movl %1,%0\n" \
538 " negl %0\n" \
539 " andl $7,%0\n" \
540 " subl %0,%3\n" \
541 "4: rep; movsb\n" \
542 " movl %3,%0\n" \
543 " shrl $2,%0\n" \
544 " andl $3,%3\n" \
545 " .align 2,0x90\n" \
546 "0: rep; movsl\n" \
547 " movl %3,%0\n" \
548 "1: rep; movsb\n" \
549 "2:\n" \
550 ".section .fixup,\"ax\"\n" \
551 "5: addl %3,%0\n" \
552 " jmp 6f\n" \
553 "3: lea 0(%3,%0,4),%0\n" \
554 "6: pushl %0\n" \
555 " pushl %%eax\n" \
556 " xorl %%eax,%%eax\n" \
557 " rep; stosb\n" \
558 " popl %%eax\n" \
559 " popl %0\n" \
560 " jmp 2b\n" \
561 ".previous\n" \
562 _ASM_EXTABLE(4b,5b) \
563 _ASM_EXTABLE(0b,3b) \
564 _ASM_EXTABLE(1b,6b) \
565 : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
566 : "3"(size), "0"(size), "1"(to), "2"(from) \
567 : "memory"); \
568} while (0)
569
570unsigned long __copy_to_user_ll(void __user *to, const void *from,
571 unsigned long n)
572{
573 stac();
574 if (movsl_is_ok(to, from, n))
575 __copy_user(to, from, n);
576 else
577 n = __copy_user_intel(to, from, n);
578 clac();
579 return n;
580}
581EXPORT_SYMBOL(__copy_to_user_ll);
582
583unsigned long __copy_from_user_ll(void *to, const void __user *from,
584 unsigned long n)
585{
586 stac();
587 if (movsl_is_ok(to, from, n))
588 __copy_user_zeroing(to, from, n);
589 else
590 n = __copy_user_zeroing_intel(to, from, n);
591 clac();
592 return n;
593}
594EXPORT_SYMBOL(__copy_from_user_ll);
595
596unsigned long __copy_from_user_ll_nozero(void *to, const void __user *from,
597 unsigned long n)
598{
599 stac();
600 if (movsl_is_ok(to, from, n))
601 __copy_user(to, from, n);
602 else
603 n = __copy_user_intel((void __user *)to,
604 (const void *)from, n);
605 clac();
606 return n;
607}
608EXPORT_SYMBOL(__copy_from_user_ll_nozero);
609
610unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from,
611 unsigned long n)
612{
613 stac();
614#ifdef CONFIG_X86_INTEL_USERCOPY
615 if (n > 64 && cpu_has_xmm2)
616 n = __copy_user_zeroing_intel_nocache(to, from, n);
617 else
618 __copy_user_zeroing(to, from, n);
619#else
620 __copy_user_zeroing(to, from, n);
621#endif
622 clac();
623 return n;
624}
625EXPORT_SYMBOL(__copy_from_user_ll_nocache);
626
627unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
628 unsigned long n)
629{
630 stac();
631#ifdef CONFIG_X86_INTEL_USERCOPY
632 if (n > 64 && cpu_has_xmm2)
633 n = __copy_user_intel_nocache(to, from, n);
634 else
635 __copy_user(to, from, n);
636#else
637 __copy_user(to, from, n);
638#endif
639 clac();
640 return n;
641}
642EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);
643
644/**
645 * copy_to_user: - Copy a block of data into user space.
646 * @to: Destination address, in user space.
647 * @from: Source address, in kernel space.
648 * @n: Number of bytes to copy.
649 *
650 * Context: User context only. This function may sleep.
651 *
652 * Copy data from kernel space to user space.
653 *
654 * Returns number of bytes that could not be copied.
655 * On success, this will be zero.
656 */
657unsigned long _copy_to_user(void __user *to, const void *from, unsigned n)
658{
659 if (access_ok(VERIFY_WRITE, to, n))
660 n = __copy_to_user(to, from, n);
661 return n;
662}
663EXPORT_SYMBOL(_copy_to_user);
664
665/**
666 * copy_from_user: - Copy a block of data from user space.
667 * @to: Destination address, in kernel space.
668 * @from: Source address, in user space.
669 * @n: Number of bytes to copy.
670 *
671 * Context: User context only. This function may sleep.
672 *
673 * Copy data from user space to kernel space.
674 *
675 * Returns number of bytes that could not be copied.
676 * On success, this will be zero.
677 *
678 * If some data could not be copied, this function will pad the copied
679 * data to the requested size using zero bytes.
680 */
681unsigned long _copy_from_user(void *to, const void __user *from, unsigned n)
682{
683 if (access_ok(VERIFY_READ, from, n))
684 n = __copy_from_user(to, from, n);
685 else
686 memset(to, 0, n);
687 return n;
688}
689EXPORT_SYMBOL(_copy_from_user);
1// SPDX-License-Identifier: GPL-2.0
2/*
3 * User address space access functions.
4 * The non inlined parts of asm-i386/uaccess.h are here.
5 *
6 * Copyright 1997 Andi Kleen <ak@muc.de>
7 * Copyright 1997 Linus Torvalds
8 */
9#include <linux/export.h>
10#include <linux/uaccess.h>
11#include <asm/mmx.h>
12#include <asm/asm.h>
13
14#ifdef CONFIG_X86_INTEL_USERCOPY
15/*
16 * Alignment at which movsl is preferred for bulk memory copies.
17 */
18struct movsl_mask movsl_mask __read_mostly;
19#endif
20
21static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
22{
23#ifdef CONFIG_X86_INTEL_USERCOPY
24 if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
25 return 0;
26#endif
27 return 1;
28}
29#define movsl_is_ok(a1, a2, n) \
30 __movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
31
32/*
33 * Zero Userspace
34 */
35
36#define __do_clear_user(addr,size) \
37do { \
38 int __d0; \
39 might_fault(); \
40 __asm__ __volatile__( \
41 ASM_STAC "\n" \
42 "0: rep; stosl\n" \
43 " movl %2,%0\n" \
44 "1: rep; stosb\n" \
45 "2: " ASM_CLAC "\n" \
46 ".section .fixup,\"ax\"\n" \
47 "3: lea 0(%2,%0,4),%0\n" \
48 " jmp 2b\n" \
49 ".previous\n" \
50 _ASM_EXTABLE_UA(0b, 3b) \
51 _ASM_EXTABLE_UA(1b, 2b) \
52 : "=&c"(size), "=&D" (__d0) \
53 : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
54} while (0)
55
56/**
57 * clear_user - Zero a block of memory in user space.
58 * @to: Destination address, in user space.
59 * @n: Number of bytes to zero.
60 *
61 * Zero a block of memory in user space.
62 *
63 * Return: number of bytes that could not be cleared.
64 * On success, this will be zero.
65 */
66unsigned long
67clear_user(void __user *to, unsigned long n)
68{
69 might_fault();
70 if (access_ok(to, n))
71 __do_clear_user(to, n);
72 return n;
73}
74EXPORT_SYMBOL(clear_user);
75
76/**
77 * __clear_user - Zero a block of memory in user space, with less checking.
78 * @to: Destination address, in user space.
79 * @n: Number of bytes to zero.
80 *
81 * Zero a block of memory in user space. Caller must check
82 * the specified block with access_ok() before calling this function.
83 *
84 * Return: number of bytes that could not be cleared.
85 * On success, this will be zero.
86 */
87unsigned long
88__clear_user(void __user *to, unsigned long n)
89{
90 __do_clear_user(to, n);
91 return n;
92}
93EXPORT_SYMBOL(__clear_user);
94
95#ifdef CONFIG_X86_INTEL_USERCOPY
96static unsigned long
97__copy_user_intel(void __user *to, const void *from, unsigned long size)
98{
99 int d0, d1;
100 __asm__ __volatile__(
101 " .align 2,0x90\n"
102 "1: movl 32(%4), %%eax\n"
103 " cmpl $67, %0\n"
104 " jbe 3f\n"
105 "2: movl 64(%4), %%eax\n"
106 " .align 2,0x90\n"
107 "3: movl 0(%4), %%eax\n"
108 "4: movl 4(%4), %%edx\n"
109 "5: movl %%eax, 0(%3)\n"
110 "6: movl %%edx, 4(%3)\n"
111 "7: movl 8(%4), %%eax\n"
112 "8: movl 12(%4),%%edx\n"
113 "9: movl %%eax, 8(%3)\n"
114 "10: movl %%edx, 12(%3)\n"
115 "11: movl 16(%4), %%eax\n"
116 "12: movl 20(%4), %%edx\n"
117 "13: movl %%eax, 16(%3)\n"
118 "14: movl %%edx, 20(%3)\n"
119 "15: movl 24(%4), %%eax\n"
120 "16: movl 28(%4), %%edx\n"
121 "17: movl %%eax, 24(%3)\n"
122 "18: movl %%edx, 28(%3)\n"
123 "19: movl 32(%4), %%eax\n"
124 "20: movl 36(%4), %%edx\n"
125 "21: movl %%eax, 32(%3)\n"
126 "22: movl %%edx, 36(%3)\n"
127 "23: movl 40(%4), %%eax\n"
128 "24: movl 44(%4), %%edx\n"
129 "25: movl %%eax, 40(%3)\n"
130 "26: movl %%edx, 44(%3)\n"
131 "27: movl 48(%4), %%eax\n"
132 "28: movl 52(%4), %%edx\n"
133 "29: movl %%eax, 48(%3)\n"
134 "30: movl %%edx, 52(%3)\n"
135 "31: movl 56(%4), %%eax\n"
136 "32: movl 60(%4), %%edx\n"
137 "33: movl %%eax, 56(%3)\n"
138 "34: movl %%edx, 60(%3)\n"
139 " addl $-64, %0\n"
140 " addl $64, %4\n"
141 " addl $64, %3\n"
142 " cmpl $63, %0\n"
143 " ja 1b\n"
144 "35: movl %0, %%eax\n"
145 " shrl $2, %0\n"
146 " andl $3, %%eax\n"
147 " cld\n"
148 "99: rep; movsl\n"
149 "36: movl %%eax, %0\n"
150 "37: rep; movsb\n"
151 "100:\n"
152 ".section .fixup,\"ax\"\n"
153 "101: lea 0(%%eax,%0,4),%0\n"
154 " jmp 100b\n"
155 ".previous\n"
156 _ASM_EXTABLE_UA(1b, 100b)
157 _ASM_EXTABLE_UA(2b, 100b)
158 _ASM_EXTABLE_UA(3b, 100b)
159 _ASM_EXTABLE_UA(4b, 100b)
160 _ASM_EXTABLE_UA(5b, 100b)
161 _ASM_EXTABLE_UA(6b, 100b)
162 _ASM_EXTABLE_UA(7b, 100b)
163 _ASM_EXTABLE_UA(8b, 100b)
164 _ASM_EXTABLE_UA(9b, 100b)
165 _ASM_EXTABLE_UA(10b, 100b)
166 _ASM_EXTABLE_UA(11b, 100b)
167 _ASM_EXTABLE_UA(12b, 100b)
168 _ASM_EXTABLE_UA(13b, 100b)
169 _ASM_EXTABLE_UA(14b, 100b)
170 _ASM_EXTABLE_UA(15b, 100b)
171 _ASM_EXTABLE_UA(16b, 100b)
172 _ASM_EXTABLE_UA(17b, 100b)
173 _ASM_EXTABLE_UA(18b, 100b)
174 _ASM_EXTABLE_UA(19b, 100b)
175 _ASM_EXTABLE_UA(20b, 100b)
176 _ASM_EXTABLE_UA(21b, 100b)
177 _ASM_EXTABLE_UA(22b, 100b)
178 _ASM_EXTABLE_UA(23b, 100b)
179 _ASM_EXTABLE_UA(24b, 100b)
180 _ASM_EXTABLE_UA(25b, 100b)
181 _ASM_EXTABLE_UA(26b, 100b)
182 _ASM_EXTABLE_UA(27b, 100b)
183 _ASM_EXTABLE_UA(28b, 100b)
184 _ASM_EXTABLE_UA(29b, 100b)
185 _ASM_EXTABLE_UA(30b, 100b)
186 _ASM_EXTABLE_UA(31b, 100b)
187 _ASM_EXTABLE_UA(32b, 100b)
188 _ASM_EXTABLE_UA(33b, 100b)
189 _ASM_EXTABLE_UA(34b, 100b)
190 _ASM_EXTABLE_UA(35b, 100b)
191 _ASM_EXTABLE_UA(36b, 100b)
192 _ASM_EXTABLE_UA(37b, 100b)
193 _ASM_EXTABLE_UA(99b, 101b)
194 : "=&c"(size), "=&D" (d0), "=&S" (d1)
195 : "1"(to), "2"(from), "0"(size)
196 : "eax", "edx", "memory");
197 return size;
198}
199
200static unsigned long __copy_user_intel_nocache(void *to,
201 const void __user *from, unsigned long size)
202{
203 int d0, d1;
204
205 __asm__ __volatile__(
206 " .align 2,0x90\n"
207 "0: movl 32(%4), %%eax\n"
208 " cmpl $67, %0\n"
209 " jbe 2f\n"
210 "1: movl 64(%4), %%eax\n"
211 " .align 2,0x90\n"
212 "2: movl 0(%4), %%eax\n"
213 "21: movl 4(%4), %%edx\n"
214 " movnti %%eax, 0(%3)\n"
215 " movnti %%edx, 4(%3)\n"
216 "3: movl 8(%4), %%eax\n"
217 "31: movl 12(%4),%%edx\n"
218 " movnti %%eax, 8(%3)\n"
219 " movnti %%edx, 12(%3)\n"
220 "4: movl 16(%4), %%eax\n"
221 "41: movl 20(%4), %%edx\n"
222 " movnti %%eax, 16(%3)\n"
223 " movnti %%edx, 20(%3)\n"
224 "10: movl 24(%4), %%eax\n"
225 "51: movl 28(%4), %%edx\n"
226 " movnti %%eax, 24(%3)\n"
227 " movnti %%edx, 28(%3)\n"
228 "11: movl 32(%4), %%eax\n"
229 "61: movl 36(%4), %%edx\n"
230 " movnti %%eax, 32(%3)\n"
231 " movnti %%edx, 36(%3)\n"
232 "12: movl 40(%4), %%eax\n"
233 "71: movl 44(%4), %%edx\n"
234 " movnti %%eax, 40(%3)\n"
235 " movnti %%edx, 44(%3)\n"
236 "13: movl 48(%4), %%eax\n"
237 "81: movl 52(%4), %%edx\n"
238 " movnti %%eax, 48(%3)\n"
239 " movnti %%edx, 52(%3)\n"
240 "14: movl 56(%4), %%eax\n"
241 "91: movl 60(%4), %%edx\n"
242 " movnti %%eax, 56(%3)\n"
243 " movnti %%edx, 60(%3)\n"
244 " addl $-64, %0\n"
245 " addl $64, %4\n"
246 " addl $64, %3\n"
247 " cmpl $63, %0\n"
248 " ja 0b\n"
249 " sfence \n"
250 "5: movl %0, %%eax\n"
251 " shrl $2, %0\n"
252 " andl $3, %%eax\n"
253 " cld\n"
254 "6: rep; movsl\n"
255 " movl %%eax,%0\n"
256 "7: rep; movsb\n"
257 "8:\n"
258 ".section .fixup,\"ax\"\n"
259 "9: lea 0(%%eax,%0,4),%0\n"
260 "16: jmp 8b\n"
261 ".previous\n"
262 _ASM_EXTABLE_UA(0b, 16b)
263 _ASM_EXTABLE_UA(1b, 16b)
264 _ASM_EXTABLE_UA(2b, 16b)
265 _ASM_EXTABLE_UA(21b, 16b)
266 _ASM_EXTABLE_UA(3b, 16b)
267 _ASM_EXTABLE_UA(31b, 16b)
268 _ASM_EXTABLE_UA(4b, 16b)
269 _ASM_EXTABLE_UA(41b, 16b)
270 _ASM_EXTABLE_UA(10b, 16b)
271 _ASM_EXTABLE_UA(51b, 16b)
272 _ASM_EXTABLE_UA(11b, 16b)
273 _ASM_EXTABLE_UA(61b, 16b)
274 _ASM_EXTABLE_UA(12b, 16b)
275 _ASM_EXTABLE_UA(71b, 16b)
276 _ASM_EXTABLE_UA(13b, 16b)
277 _ASM_EXTABLE_UA(81b, 16b)
278 _ASM_EXTABLE_UA(14b, 16b)
279 _ASM_EXTABLE_UA(91b, 16b)
280 _ASM_EXTABLE_UA(6b, 9b)
281 _ASM_EXTABLE_UA(7b, 16b)
282 : "=&c"(size), "=&D" (d0), "=&S" (d1)
283 : "1"(to), "2"(from), "0"(size)
284 : "eax", "edx", "memory");
285 return size;
286}
287
288#else
289
290/*
291 * Leave these declared but undefined. They should not be any references to
292 * them
293 */
294unsigned long __copy_user_intel(void __user *to, const void *from,
295 unsigned long size);
296#endif /* CONFIG_X86_INTEL_USERCOPY */
297
298/* Generic arbitrary sized copy. */
299#define __copy_user(to, from, size) \
300do { \
301 int __d0, __d1, __d2; \
302 __asm__ __volatile__( \
303 " cmp $7,%0\n" \
304 " jbe 1f\n" \
305 " movl %1,%0\n" \
306 " negl %0\n" \
307 " andl $7,%0\n" \
308 " subl %0,%3\n" \
309 "4: rep; movsb\n" \
310 " movl %3,%0\n" \
311 " shrl $2,%0\n" \
312 " andl $3,%3\n" \
313 " .align 2,0x90\n" \
314 "0: rep; movsl\n" \
315 " movl %3,%0\n" \
316 "1: rep; movsb\n" \
317 "2:\n" \
318 ".section .fixup,\"ax\"\n" \
319 "5: addl %3,%0\n" \
320 " jmp 2b\n" \
321 "3: lea 0(%3,%0,4),%0\n" \
322 " jmp 2b\n" \
323 ".previous\n" \
324 _ASM_EXTABLE_UA(4b, 5b) \
325 _ASM_EXTABLE_UA(0b, 3b) \
326 _ASM_EXTABLE_UA(1b, 2b) \
327 : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
328 : "3"(size), "0"(size), "1"(to), "2"(from) \
329 : "memory"); \
330} while (0)
331
332unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
333{
334 __uaccess_begin_nospec();
335 if (movsl_is_ok(to, from, n))
336 __copy_user(to, from, n);
337 else
338 n = __copy_user_intel(to, from, n);
339 __uaccess_end();
340 return n;
341}
342EXPORT_SYMBOL(__copy_user_ll);
343
344unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
345 unsigned long n)
346{
347 __uaccess_begin_nospec();
348#ifdef CONFIG_X86_INTEL_USERCOPY
349 if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
350 n = __copy_user_intel_nocache(to, from, n);
351 else
352 __copy_user(to, from, n);
353#else
354 __copy_user(to, from, n);
355#endif
356 __uaccess_end();
357 return n;
358}
359EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);