/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>

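/*
 * ALIGN_DESTINATION: copy single bytes until the destination (%rdi) is
 * 8-byte aligned, decrementing the remaining count in %edx as it goes.
 * On a fault, the fixup adds the not-yet-copied alignment bytes back
 * into %edx and hands off to copy_user_handle_tail.
 */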
	.macro ALIGN_DESTINATION
#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx
	jz 102f				/* already aligned */
	subl $8,%ecx
	negl %ecx
	subl %ecx,%edx
100:	movb (%rsi),%al
101:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 100b
102:
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp copy_user_handle_tail
	.previous

	_ASM_EXTABLE(100b,103b)
	_ASM_EXTABLE(101b,103b)
#endif
	.endm

/*
 * __copy_user_nocache - Uncached memory copy with exception handling.
 * Uses non-temporal (movnti) stores so the copied data bypasses the
 * CPU caches, avoiding cache pollution on large copies.
 */
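/*
 * __copy_user_nocache(dst, src, size, zerorest)
 *	%rdi	destination
 *	%rsi	source
 *	%edx	byte count
 *	%ecx	zerorest, passed through to copy_user_handle_tail
 *		(see the "ecx is zerorest also" note above)
 * Returns in %eax: 0 on success, otherwise the number of uncopied
 * bytes as computed by copy_user_handle_tail.
 */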
ENTRY(__copy_user_nocache)
	CFI_STARTPROC
	cmpl $8,%edx
	jb 20f		/* less than 8 bytes, go to byte copy loop */
	ALIGN_DESTINATION
	movl %edx,%ecx
	andl $63,%edx	/* %edx = bytes left over after 64-byte blocks */
	shrl $6,%ecx	/* %ecx = number of 64-byte blocks */
	jz 17f
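	/*
	 * Main loop: eight cached loads followed by eight non-temporal
	 * stores, moving 64 bytes per iteration.
	 */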
1:	movq (%rsi),%r8
2:	movq 1*8(%rsi),%r9
3:	movq 2*8(%rsi),%r10
4:	movq 3*8(%rsi),%r11
5:	movnti %r8,(%rdi)
6:	movnti %r9,1*8(%rdi)
7:	movnti %r10,2*8(%rdi)
8:	movnti %r11,3*8(%rdi)
9:	movq 4*8(%rsi),%r8
10:	movq 5*8(%rsi),%r9
11:	movq 6*8(%rsi),%r10
12:	movq 7*8(%rsi),%r11
13:	movnti %r8,4*8(%rdi)
14:	movnti %r9,5*8(%rdi)
15:	movnti %r10,6*8(%rdi)
16:	movnti %r11,7*8(%rdi)
	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi
	decl %ecx
	jnz 1b
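	/* copy any remaining whole quadwords, 8 bytes at a time */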
17:	movl %edx,%ecx
	andl $7,%edx
	shrl $3,%ecx
	jz 20f
18:	movq (%rsi),%r8
19:	movnti %r8,(%rdi)
	leaq 8(%rsi),%rsi
	leaq 8(%rdi),%rdi
	decl %ecx
	jnz 18b
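	/* copy the trailing bytes (fewer than 8) one at a time */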
20:	andl %edx,%edx
	jz 23f
	movl %edx,%ecx
21:	movb (%rsi),%al
22:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 21b
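	/* success: sfence orders the non-temporal stores, return 0 */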
23:	xorl %eax,%eax
	sfence
	ret

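	/*
	 * Fault fixups: each label computes the number of bytes still
	 * to copy into %edx, flushes the pending non-temporal stores,
	 * then hands off to copy_user_handle_tail.
	 */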
	.section .fixup,"ax"
30:	shll $6,%ecx		/* remaining 64-byte blocks -> bytes */
	addl %ecx,%edx		/* plus the sub-64-byte remainder */
	jmp 60f
40:	lea (%rdx,%rcx,8),%rdx	/* remaining quadwords * 8 + tail bytes */
	jmp 60f
50:	movl %ecx,%edx		/* bytes left in the byte loop */
60:	sfence
	jmp copy_user_handle_tail
	.previous

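	/* map every load/store above to its fixup entry point */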
	_ASM_EXTABLE(1b,30b)
	_ASM_EXTABLE(2b,30b)
	_ASM_EXTABLE(3b,30b)
	_ASM_EXTABLE(4b,30b)
	_ASM_EXTABLE(5b,30b)
	_ASM_EXTABLE(6b,30b)
	_ASM_EXTABLE(7b,30b)
	_ASM_EXTABLE(8b,30b)
	_ASM_EXTABLE(9b,30b)
	_ASM_EXTABLE(10b,30b)
	_ASM_EXTABLE(11b,30b)
	_ASM_EXTABLE(12b,30b)
	_ASM_EXTABLE(13b,30b)
	_ASM_EXTABLE(14b,30b)
	_ASM_EXTABLE(15b,30b)
	_ASM_EXTABLE(16b,30b)
	_ASM_EXTABLE(18b,40b)
	_ASM_EXTABLE(19b,40b)
	_ASM_EXTABLE(21b,50b)
	_ASM_EXTABLE(22b,50b)
	CFI_ENDPROC
ENDPROC(__copy_user_nocache)