/* NOTE(review): web-scrape artifact removed; two revisions of x86 getuser.S follow. */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *		The register is modified, but all changes are undone
 *		before returning because the C code doesn't know about it.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>

	.text
/*
 * 1-byte fetch from user space.
 * In:  %[r|e]ax = user address (no span check needed for a single byte).
 * Out: %[r|e]ax = 0, %edx = zero-extended byte; or -EFAULT via bad_get_user.
 * Clobbers %[r|e]dx only, per the contract in the file header.
 */
ENTRY(__get_user_1)
	CFI_STARTPROC
	GET_THREAD_INFO(%_ASM_DX)		/* thread_info holds addr_limit */
	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX	/* unsigned range check */
	jae bad_get_user			/* at/above limit: fault path */
1:	movzb (%_ASM_AX),%edx			/* may fault; fixed up via __ex_table */
	xor %eax,%eax				/* success: error code 0 */
	ret
	CFI_ENDPROC
ENDPROC(__get_user_1)
48
/*
 * 2-byte fetch from user space.
 * Checks the address of the LAST byte (addr+1) against addr_limit so the
 * whole access lies below the limit; the load then uses a -1 displacement.
 */
ENTRY(__get_user_2)
	CFI_STARTPROC
	add $1,%_ASM_AX				/* point at last byte of the access */
	jc bad_get_user				/* carry: address wrapped around */
	GET_THREAD_INFO(%_ASM_DX)
	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
2:	movzwl -1(%_ASM_AX),%edx		/* may fault; fixed up via __ex_table */
	xor %eax,%eax				/* success */
	ret
	CFI_ENDPROC
ENDPROC(__get_user_2)
61
/*
 * 4-byte fetch from user space.
 * Same scheme as __get_user_2: validate addr+3 (last byte), then load
 * with a -3 displacement. Writing %edx zero-extends into %rdx on 64-bit.
 */
ENTRY(__get_user_4)
	CFI_STARTPROC
	add $3,%_ASM_AX				/* point at last byte of the access */
	jc bad_get_user				/* carry: address wrapped around */
	GET_THREAD_INFO(%_ASM_DX)
	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
3:	mov -3(%_ASM_AX),%edx			/* may fault; fixed up via __ex_table */
	xor %eax,%eax				/* success */
	ret
	CFI_ENDPROC
ENDPROC(__get_user_4)
74
#ifdef CONFIG_X86_64
/*
 * 8-byte fetch from user space (64-bit kernels only in this revision).
 * Validates addr+7 (last byte), then loads with a -7 displacement into
 * the full-width %_ASM_DX since the value does not fit in %edx.
 */
ENTRY(__get_user_8)
	CFI_STARTPROC
	add $7,%_ASM_AX				/* point at last byte of the access */
	jc bad_get_user				/* carry: address wrapped around */
	GET_THREAD_INFO(%_ASM_DX)
	cmp TI_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
4:	movq -7(%_ASM_AX),%_ASM_DX		/* may fault; fixed up via __ex_table */
	xor %eax,%eax				/* success */
	ret
	CFI_ENDPROC
ENDPROC(__get_user_8)
#endif
89
/*
 * Common failure path: reached on a range-check failure or via the
 * exception-table fixup when the user load faults.
 * Out: %[r|e]ax = -EFAULT, %edx = 0 (so callers never see stale data).
 */
bad_get_user:
	CFI_STARTPROC
	xor %edx,%edx				/* clear the value register */
	mov $(-EFAULT),%_ASM_AX			/* error code */
	ret
	CFI_ENDPROC
END(bad_get_user)
97
/*
 * Exception table: maps each faulting user-load label (1b..4b) to the
 * bad_get_user fixup, so a fault becomes -EFAULT instead of an oops.
 */
.section __ex_table,"a"
	_ASM_PTR 1b,bad_get_user
	_ASM_PTR 2b,bad_get_user
	_ASM_PTR 3b,bad_get_user
#ifdef CONFIG_X86_64
	_ASM_PTR 4b,bad_get_user		/* label 4 only exists on 64-bit */
#endif
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */
14
/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
28
#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

	.text
/*
 * 1-byte fetch from user space (SMAP/Spectre-hardened revision).
 * In:  %[r|e]ax = user address.
 * Out: %[r|e]ax = 0 and %edx = zero-extended byte, or -EFAULT via bad_get_user.
 */
ENTRY(__get_user_1)
	mov PER_CPU_VAR(current_task), %_ASM_DX	/* addr_limit now lives in task_struct */
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX	/* unsigned range check */
	jae bad_get_user
	/* CF from the cmp is 1 iff addr < limit: mask is ~0 on the valid
	 * path and 0 under misspeculation — Spectre-v1 clamp. */
	sbb %_ASM_DX, %_ASM_DX			/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC				/* open SMAP user-access window */
1:	movzbl (%_ASM_AX),%edx			/* may fault; fixed up via extable */
	xor %eax,%eax				/* success */
	ASM_CLAC				/* close user-access window */
	ret
ENDPROC(__get_user_1)
EXPORT_SYMBOL(__get_user_1)
52
/*
 * 2-byte fetch from user space.
 * Validates the LAST byte (addr+1) against addr_limit, clamps the pointer
 * against speculation, then loads with a -1 displacement under STAC/CLAC.
 */
ENTRY(__get_user_2)
	add $1,%_ASM_AX				/* point at last byte of the access */
	jc bad_get_user				/* carry: address wrapped around */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX			/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC				/* open SMAP user-access window */
2:	movzwl -1(%_ASM_AX),%edx		/* may fault; fixed up via extable */
	xor %eax,%eax				/* success */
	ASM_CLAC
	ret
ENDPROC(__get_user_2)
EXPORT_SYMBOL(__get_user_2)
68
/*
 * 4-byte fetch from user space.
 * Validates addr+3 (last byte), clamps the pointer against speculation,
 * then loads with a -3 displacement under STAC/CLAC.
 */
ENTRY(__get_user_4)
	add $3,%_ASM_AX				/* point at last byte of the access */
	jc bad_get_user				/* carry: address wrapped around */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX			/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC				/* open SMAP user-access window */
3:	movl -3(%_ASM_AX),%edx			/* may fault; fixed up via extable */
	xor %eax,%eax				/* success */
	ASM_CLAC
	ret
ENDPROC(__get_user_4)
EXPORT_SYMBOL(__get_user_4)
84
/*
 * 8-byte fetch from user space.
 * 64-bit: one movq into %rdx.
 * 32-bit: two movl loads — low half in %edx, high half in %ecx — and a
 * dedicated bad_get_user_8 fixup so BOTH output registers are cleared.
 * Either way, addr+7 (the last byte) is range-checked first.
 */
ENTRY(__get_user_8)
#ifdef CONFIG_X86_64
	add $7,%_ASM_AX				/* point at last byte of the access */
	jc bad_get_user				/* carry: address wrapped around */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX			/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC				/* open SMAP user-access window */
4:	movq -7(%_ASM_AX),%rdx			/* may fault; fixed up via extable */
	xor %eax,%eax				/* success */
	ASM_CLAC
	ret
#else
	add $7,%_ASM_AX				/* point at last byte of the access */
	jc bad_get_user_8			/* 8-byte fixup clears %ecx too */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX			/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl -7(%_ASM_AX),%edx			/* low 32 bits */
5:	movl -3(%_ASM_AX),%ecx			/* high 32 bits */
	xor %eax,%eax				/* success */
	ASM_CLAC
	ret
#endif
ENDPROC(__get_user_8)
EXPORT_SYMBOL(__get_user_8)
116
117
/*
 * Failure paths. The .L*_clac entries are the exception-table fixup
 * targets: a fault happens with the SMAP window open, so ASM_CLAC must
 * run before returning. The plain labels are jumped to by the in-line
 * range checks, which run before ASM_STAC.
 * Out: %[r|e]ax = -EFAULT; value registers zeroed so callers never see
 * stale data (%ecx as well on the 32-bit 8-byte path).
 */
.Lbad_get_user_clac:
	ASM_CLAC				/* close user-access window opened by STAC */
bad_get_user:
	xor %edx,%edx				/* clear value register */
	mov $(-EFAULT),%_ASM_AX			/* error code */
	ret

#ifdef CONFIG_X86_32
.Lbad_get_user_8_clac:
	ASM_CLAC
bad_get_user_8:
	xor %edx,%edx				/* clear low half */
	xor %ecx,%ecx				/* clear high half */
	mov $(-EFAULT),%_ASM_AX
	ret
#endif
134
/*
 * Exception-table entries: route a fault at each user-load label to the
 * CLAC-first fixup. On 32-bit, labels 4 and 5 belong to the split 8-byte
 * load and use the fixup that also clears %ecx.
 */
	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif