/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
28
29#include <linux/export.h>
30#include <linux/linkage.h>
31#include <asm/page_types.h>
32#include <asm/errno.h>
33#include <asm/asm-offsets.h>
34#include <asm/thread_info.h>
35#include <asm/asm.h>
36#include <asm/smap.h>
37
38#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC
39
40.macro check_range size:req
41.if IS_ENABLED(CONFIG_X86_64)
42 mov %rax, %rdx
43 sar $63, %rdx
44 or %rdx, %rax
45.else
46 cmp $TASK_SIZE_MAX-\size+1, %eax
47 jae .Lbad_get_user
48 sbb %edx, %edx /* array_index_mask_nospec() */
49 and %edx, %eax
50.endif
51.endm
52
53 .text
54SYM_FUNC_START(__get_user_1)
55 check_range size=1
56 ASM_STAC
571: movzbl (%_ASM_AX),%edx
58 xor %eax,%eax
59 ASM_CLAC
60 RET
61SYM_FUNC_END(__get_user_1)
62EXPORT_SYMBOL(__get_user_1)
63
64SYM_FUNC_START(__get_user_2)
65 check_range size=2
66 ASM_STAC
672: movzwl (%_ASM_AX),%edx
68 xor %eax,%eax
69 ASM_CLAC
70 RET
71SYM_FUNC_END(__get_user_2)
72EXPORT_SYMBOL(__get_user_2)
73
74SYM_FUNC_START(__get_user_4)
75 check_range size=4
76 ASM_STAC
773: movl (%_ASM_AX),%edx
78 xor %eax,%eax
79 ASM_CLAC
80 RET
81SYM_FUNC_END(__get_user_4)
82EXPORT_SYMBOL(__get_user_4)
83
84SYM_FUNC_START(__get_user_8)
85 check_range size=8
86 ASM_STAC
87#ifdef CONFIG_X86_64
884: movq (%_ASM_AX),%rdx
89#else
904: movl (%_ASM_AX),%edx
915: movl 4(%_ASM_AX),%ecx
92#endif
93 xor %eax,%eax
94 ASM_CLAC
95 RET
96SYM_FUNC_END(__get_user_8)
97EXPORT_SYMBOL(__get_user_8)
98
99/* .. and the same for __get_user, just without the range checks */
100SYM_FUNC_START(__get_user_nocheck_1)
101 ASM_STAC
102 ASM_BARRIER_NOSPEC
1036: movzbl (%_ASM_AX),%edx
104 xor %eax,%eax
105 ASM_CLAC
106 RET
107SYM_FUNC_END(__get_user_nocheck_1)
108EXPORT_SYMBOL(__get_user_nocheck_1)
109
110SYM_FUNC_START(__get_user_nocheck_2)
111 ASM_STAC
112 ASM_BARRIER_NOSPEC
1137: movzwl (%_ASM_AX),%edx
114 xor %eax,%eax
115 ASM_CLAC
116 RET
117SYM_FUNC_END(__get_user_nocheck_2)
118EXPORT_SYMBOL(__get_user_nocheck_2)
119
120SYM_FUNC_START(__get_user_nocheck_4)
121 ASM_STAC
122 ASM_BARRIER_NOSPEC
1238: movl (%_ASM_AX),%edx
124 xor %eax,%eax
125 ASM_CLAC
126 RET
127SYM_FUNC_END(__get_user_nocheck_4)
128EXPORT_SYMBOL(__get_user_nocheck_4)
129
130SYM_FUNC_START(__get_user_nocheck_8)
131 ASM_STAC
132 ASM_BARRIER_NOSPEC
133#ifdef CONFIG_X86_64
1349: movq (%_ASM_AX),%rdx
135#else
1369: movl (%_ASM_AX),%edx
13710: movl 4(%_ASM_AX),%ecx
138#endif
139 xor %eax,%eax
140 ASM_CLAC
141 RET
142SYM_FUNC_END(__get_user_nocheck_8)
143EXPORT_SYMBOL(__get_user_nocheck_8)
144
145
146SYM_CODE_START_LOCAL(__get_user_handle_exception)
147 ASM_CLAC
148.Lbad_get_user:
149 xor %edx,%edx
150 mov $(-EFAULT),%_ASM_AX
151 RET
152SYM_CODE_END(__get_user_handle_exception)
153
154#ifdef CONFIG_X86_32
155SYM_CODE_START_LOCAL(__get_user_8_handle_exception)
156 ASM_CLAC
157bad_get_user_8:
158 xor %edx,%edx
159 xor %ecx,%ecx
160 mov $(-EFAULT),%_ASM_AX
161 RET
162SYM_CODE_END(__get_user_8_handle_exception)
163#endif
164
165/* get_user */
166 _ASM_EXTABLE_UA(1b, __get_user_handle_exception)
167 _ASM_EXTABLE_UA(2b, __get_user_handle_exception)
168 _ASM_EXTABLE_UA(3b, __get_user_handle_exception)
169#ifdef CONFIG_X86_64
170 _ASM_EXTABLE_UA(4b, __get_user_handle_exception)
171#else
172 _ASM_EXTABLE_UA(4b, __get_user_8_handle_exception)
173 _ASM_EXTABLE_UA(5b, __get_user_8_handle_exception)
174#endif
175
176/* __get_user */
177 _ASM_EXTABLE_UA(6b, __get_user_handle_exception)
178 _ASM_EXTABLE_UA(7b, __get_user_handle_exception)
179 _ASM_EXTABLE_UA(8b, __get_user_handle_exception)
180#ifdef CONFIG_X86_64
181 _ASM_EXTABLE_UA(9b, __get_user_handle_exception)
182#else
183 _ASM_EXTABLE_UA(9b, __get_user_8_handle_exception)
184 _ASM_EXTABLE_UA(10b, __get_user_8_handle_exception)
185#endif
/*
 * NOTE(review): everything from here down appears to be a second, older
 * copy of this same file (pre-check_range era, using TASK_addr_limit)
 * accidentally concatenated into the source — it redefines every symbol
 * above. Confirm and remove the whole second copy.
 */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
28
29#include <linux/linkage.h>
30#include <asm/page_types.h>
31#include <asm/errno.h>
32#include <asm/asm-offsets.h>
33#include <asm/thread_info.h>
34#include <asm/asm.h>
35#include <asm/smap.h>
36#include <asm/export.h>
37
38 .text
39SYM_FUNC_START(__get_user_1)
40 mov PER_CPU_VAR(current_task), %_ASM_DX
41 cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
42 jae bad_get_user
43 sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
44 and %_ASM_DX, %_ASM_AX
45 ASM_STAC
461: movzbl (%_ASM_AX),%edx
47 xor %eax,%eax
48 ASM_CLAC
49 ret
50SYM_FUNC_END(__get_user_1)
51EXPORT_SYMBOL(__get_user_1)
52
53SYM_FUNC_START(__get_user_2)
54 add $1,%_ASM_AX
55 jc bad_get_user
56 mov PER_CPU_VAR(current_task), %_ASM_DX
57 cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
58 jae bad_get_user
59 sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
60 and %_ASM_DX, %_ASM_AX
61 ASM_STAC
622: movzwl -1(%_ASM_AX),%edx
63 xor %eax,%eax
64 ASM_CLAC
65 ret
66SYM_FUNC_END(__get_user_2)
67EXPORT_SYMBOL(__get_user_2)
68
69SYM_FUNC_START(__get_user_4)
70 add $3,%_ASM_AX
71 jc bad_get_user
72 mov PER_CPU_VAR(current_task), %_ASM_DX
73 cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
74 jae bad_get_user
75 sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
76 and %_ASM_DX, %_ASM_AX
77 ASM_STAC
783: movl -3(%_ASM_AX),%edx
79 xor %eax,%eax
80 ASM_CLAC
81 ret
82SYM_FUNC_END(__get_user_4)
83EXPORT_SYMBOL(__get_user_4)
84
85SYM_FUNC_START(__get_user_8)
86#ifdef CONFIG_X86_64
87 add $7,%_ASM_AX
88 jc bad_get_user
89 mov PER_CPU_VAR(current_task), %_ASM_DX
90 cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
91 jae bad_get_user
92 sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
93 and %_ASM_DX, %_ASM_AX
94 ASM_STAC
954: movq -7(%_ASM_AX),%rdx
96 xor %eax,%eax
97 ASM_CLAC
98 ret
99#else
100 add $7,%_ASM_AX
101 jc bad_get_user_8
102 mov PER_CPU_VAR(current_task), %_ASM_DX
103 cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
104 jae bad_get_user_8
105 sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
106 and %_ASM_DX, %_ASM_AX
107 ASM_STAC
1084: movl -7(%_ASM_AX),%edx
1095: movl -3(%_ASM_AX),%ecx
110 xor %eax,%eax
111 ASM_CLAC
112 ret
113#endif
114SYM_FUNC_END(__get_user_8)
115EXPORT_SYMBOL(__get_user_8)
116
117
118SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
119 ASM_CLAC
120bad_get_user:
121 xor %edx,%edx
122 mov $(-EFAULT),%_ASM_AX
123 ret
124SYM_CODE_END(.Lbad_get_user_clac)
125
126#ifdef CONFIG_X86_32
127SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
128 ASM_CLAC
129bad_get_user_8:
130 xor %edx,%edx
131 xor %ecx,%ecx
132 mov $(-EFAULT),%_ASM_AX
133 ret
134SYM_CODE_END(.Lbad_get_user_8_clac)
135#endif
136
137 _ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
138 _ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
139 _ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
140#ifdef CONFIG_X86_64
141 _ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
142#else
143 _ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
144 _ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
145#endif