/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
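
/*
 * A typical call site, sketched here for illustration only (the real
 * get_user() macro in <asm/uaccess.h> generates the equivalent inline
 * asm; register use beyond the fixed %[r|e]ax/%[r|e]dx is up to the
 * compiler):
 *
 *	mov	ptr,%_ASM_AX		# address to fetch from
 *	call	__get_user_4
 *	test	%eax,%eax		# 0 on success, -EFAULT on failure
 *	# on success, the zero-extended value is in %edx
 */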

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

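/*
 * Speculation barrier for the unchecked variants below: ALTERNATIVE
 * leaves a no-op by default and patches in an LFENCE on CPUs that
 * advertise a serializing LFENCE (X86_FEATURE_LFENCE_RDTSC).
 */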
#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC

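/*
 * Load the highest address from which an (n + 1)-byte user access may
 * start into %[r|e]dx.  With CONFIG_X86_5LEVEL the boundary depends on
 * whether the CPU uses 5-level page tables, so ALTERNATIVE patches in
 * the 56-bit constant on X86_FEATURE_LA57 machines and the 47-bit one
 * elsewhere; both values correspond to TASK_SIZE_MAX.
 */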
#ifdef CONFIG_X86_5LEVEL
#define LOAD_TASK_SIZE_MINUS_N(n) \
	ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
		    __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
#else
#define LOAD_TASK_SIZE_MINUS_N(n) \
	mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
#endif

	.text
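
/*
 * The checked variants below share one pattern: reject any address at
 * or above the user/kernel boundary, mask the pointer with the sbb/and
 * pair so a mispredicted "jae" cannot speculatively dereference a
 * kernel address (array_index_mask_nospec()), open the SMAP window
 * with STAC around the user-space load, and close it with CLAC.  The
 * numeric label on each load is referenced by the exception table
 * entries at the bottom of this file.
 */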
SYM_FUNC_START(__get_user_1)
	LOAD_TASK_SIZE_MINUS_N(0)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
1:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

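/*
 * For the wider accesses the limit is lowered by (size - 1), so the
 * check also rejects a pointer whose first byte is below TASK_SIZE_MAX
 * but whose last byte would land beyond it.
 */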
SYM_FUNC_START(__get_user_2)
	LOAD_TASK_SIZE_MINUS_N(1)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
2:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

SYM_FUNC_START(__get_user_4)
	LOAD_TASK_SIZE_MINUS_N(3)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
3:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

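/*
 * On 32-bit kernels a 64-bit value does not fit in %edx alone, so the
 * low half is returned in %edx and the high half in %ecx, with a
 * dedicated fixup that clears both registers on a fault.
 */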
SYM_FUNC_START(__get_user_8)
#ifdef CONFIG_X86_64
	LOAD_TASK_SIZE_MINUS_N(7)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movq (%_ASM_AX),%rdx
	xor %eax,%eax
	ASM_CLAC
	RET
#else
	LOAD_TASK_SIZE_MINUS_N(7)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl (%_ASM_AX),%edx
5:	movl 4(%_ASM_AX),%ecx
	xor %eax,%eax
	ASM_CLAC
	RET
#endif
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)

/* .. and the same for __get_user, just without the range checks */
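/*
 * Without a limit comparison there is no branch for the sbb/and trick
 * to turn into a mask, so a speculation barrier (ASM_BARRIER_NOSPEC
 * above) is used instead before dereferencing the caller-validated
 * pointer.
 */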
SYM_FUNC_START(__get_user_nocheck_1)
	ASM_STAC
	ASM_BARRIER_NOSPEC
6:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_1)
EXPORT_SYMBOL(__get_user_nocheck_1)

SYM_FUNC_START(__get_user_nocheck_2)
	ASM_STAC
	ASM_BARRIER_NOSPEC
7:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_2)
EXPORT_SYMBOL(__get_user_nocheck_2)

SYM_FUNC_START(__get_user_nocheck_4)
	ASM_STAC
	ASM_BARRIER_NOSPEC
8:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_4)
EXPORT_SYMBOL(__get_user_nocheck_4)

SYM_FUNC_START(__get_user_nocheck_8)
	ASM_STAC
	ASM_BARRIER_NOSPEC
#ifdef CONFIG_X86_64
9:	movq (%_ASM_AX),%rdx
#else
9:	movl (%_ASM_AX),%edx
10:	movl 4(%_ASM_AX),%ecx
#endif
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_8)
EXPORT_SYMBOL(__get_user_nocheck_8)

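/*
 * Fault handling: the exception table points a faulting user access at
 * the .L*_clac entry, which undoes the STAC executed before the load.
 * The "jae" range-check branches enter at the bad_get_user* labels,
 * past the CLAC, since SMAP was never opened on that path.
 */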
SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
	ASM_CLAC
bad_get_user:
	xor %edx,%edx
	mov $(-EFAULT),%_ASM_AX
	RET
SYM_CODE_END(.Lbad_get_user_clac)

#ifdef CONFIG_X86_32
SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
	ASM_CLAC
bad_get_user_8:
	xor %edx,%edx
	xor %ecx,%ecx
	mov $(-EFAULT),%_ASM_AX
	RET
SYM_CODE_END(.Lbad_get_user_8_clac)
#endif

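/*
 * Exception table entries pair each numeric label marking a user-space
 * load above with the fixup the fault handler jumps to when that load
 * faults.
 */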
/* get_user */
	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif

/* __get_user */
	_ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
#endif