v6.8 (arch/x86/lib/getuser.S)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
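/*
 * This register contract is what the get_user()/__get_user() macros in
 * arch/x86/include/asm/uaccess.h rely on when they emit a "call __get_user_N"
 * from inline assembly.  A minimal sketch of a caller honouring the contract
 * (placeholder names, not the exact uaccess.h code) might look like:
 *
 *	const void __user *uptr;	user pointer, assumed already set up
 *	int err;
 *	unsigned long val;
 *
 *	asm volatile("call __get_user_4"
 *		     : "=a" (err), "=d" (val)	error in ax, value in dx
 *		     : "0" (uptr));		address passed in ax
 *
 * which is also why these routines must not clobber anything beyond ax/dx
 * (plus ecx for the 32-bit __get_user_8 case).
 */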

#include <linux/export.h>
#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>

#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC
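/*
 * The ALTERNATIVE above patches in an LFENCE on CPUs where LFENCE acts as a
 * dispatch-serializing speculation barrier (what X86_FEATURE_LFENCE_RDTSC
 * advertises).  The __get_user_nocheck_* variants below use it in place of
 * address masking to keep the CPU from speculatively dereferencing a pointer
 * that has not been range-checked here.
 */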

.macro check_range size:req
.if IS_ENABLED(CONFIG_X86_64)
	mov %rax, %rdx
	sar $63, %rdx
	or %rdx, %rax
.else
	cmp $TASK_SIZE_MAX-\size+1, %eax
	jae .Lbad_get_user
	sbb %edx, %edx		/* array_index_mask_nospec() */
	and %edx, %eax
.endif
.endm
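/*
 * How check_range works:
 *
 * On 64-bit, user pointers have bit 63 clear and kernel pointers have it
 * set, so "sar $63" smears the sign bit into a mask: a user address passes
 * through unchanged, while a kernel address is forced to all ones, which
 * can never be a valid user access and therefore faults, with the exception
 * table fixup turning the fault into -EFAULT.  Since the mask is derived
 * from the address itself, a kernel address also cannot be dereferenced
 * speculatively.
 *
 * On 32-bit, the cmp/jae pair rejects any access whose last byte would
 * reach TASK_SIZE_MAX or beyond, and the sbb/and pair is the
 * array_index_mask_nospec() pattern: if the branch is mispredicted, the
 * address is masked to zero so nothing out of range can be loaded under
 * speculation.
 */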

	.text
SYM_FUNC_START(__get_user_1)
	check_range size=1
	ASM_STAC
1:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

SYM_FUNC_START(__get_user_2)
	check_range size=2
	ASM_STAC
2:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

SYM_FUNC_START(__get_user_4)
	check_range size=4
	ASM_STAC
3:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

SYM_FUNC_START(__get_user_8)
	check_range size=8
	ASM_STAC
#ifdef CONFIG_X86_64
4:	movq (%_ASM_AX),%rdx
#else
4:	movl (%_ASM_AX),%edx
5:	movl 4(%_ASM_AX),%ecx
#endif
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)

/* .. and the same for __get_user, just without the range checks */
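/*
 * The nocheck variants are used when the caller has already validated the
 * pointer, e.g. __get_user() after an explicit access_ok().  No masking is
 * done here, so ASM_BARRIER_NOSPEC serves as the speculation barrier before
 * the load instead.
 */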
SYM_FUNC_START(__get_user_nocheck_1)
	ASM_STAC
	ASM_BARRIER_NOSPEC
6:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_1)
EXPORT_SYMBOL(__get_user_nocheck_1)

SYM_FUNC_START(__get_user_nocheck_2)
	ASM_STAC
	ASM_BARRIER_NOSPEC
7:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_2)
EXPORT_SYMBOL(__get_user_nocheck_2)

SYM_FUNC_START(__get_user_nocheck_4)
	ASM_STAC
	ASM_BARRIER_NOSPEC
8:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_4)
EXPORT_SYMBOL(__get_user_nocheck_4)

SYM_FUNC_START(__get_user_nocheck_8)
	ASM_STAC
	ASM_BARRIER_NOSPEC
#ifdef CONFIG_X86_64
9:	movq (%_ASM_AX),%rdx
#else
9:	movl (%_ASM_AX),%edx
10:	movl 4(%_ASM_AX),%ecx
#endif
	xor %eax,%eax
	ASM_CLAC
	RET
SYM_FUNC_END(__get_user_nocheck_8)
EXPORT_SYMBOL(__get_user_nocheck_8)


SYM_CODE_START_LOCAL(__get_user_handle_exception)
	ASM_CLAC
.Lbad_get_user:
	xor %edx,%edx
	mov $(-EFAULT),%_ASM_AX
	RET
SYM_CODE_END(__get_user_handle_exception)

#ifdef CONFIG_X86_32
SYM_CODE_START_LOCAL(__get_user_8_handle_exception)
	ASM_CLAC
bad_get_user_8:
	xor %edx,%edx
	xor %ecx,%ecx
	mov $(-EFAULT),%_ASM_AX
	RET
SYM_CODE_END(__get_user_8_handle_exception)
#endif

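/*
 * Exception table fixups: each numbered local label above marks a load from
 * user space that may fault.  _ASM_EXTABLE_UA() records a fixup entry so a
 * fault at that instruction lands in one of the handlers above, which clear
 * AC, zero the output register(s) and return -EFAULT.  The 32-bit
 * __get_user_8 loads get their own handler because the value is returned in
 * %edx:%ecx and both halves must be cleared on failure.
 */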
/* get_user */
	_ASM_EXTABLE_UA(1b, __get_user_handle_exception)
	_ASM_EXTABLE_UA(2b, __get_user_handle_exception)
	_ASM_EXTABLE_UA(3b, __get_user_handle_exception)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, __get_user_handle_exception)
#else
	_ASM_EXTABLE_UA(4b, __get_user_8_handle_exception)
	_ASM_EXTABLE_UA(5b, __get_user_8_handle_exception)
#endif

/* __get_user */
	_ASM_EXTABLE_UA(6b, __get_user_handle_exception)
	_ASM_EXTABLE_UA(7b, __get_user_handle_exception)
	_ASM_EXTABLE_UA(8b, __get_user_handle_exception)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(9b, __get_user_handle_exception)
#else
	_ASM_EXTABLE_UA(9b, __get_user_8_handle_exception)
	_ASM_EXTABLE_UA(10b, __get_user_8_handle_exception)
#endif
v5.14.15 (arch/x86/lib/getuser.S)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC

#ifdef CONFIG_X86_5LEVEL
#define LOAD_TASK_SIZE_MINUS_N(n) \
	ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
		    __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
#else
#define LOAD_TASK_SIZE_MINUS_N(n) \
	mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
#endif
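/*
 * With CONFIG_X86_5LEVEL the user address space limit depends on whether
 * 5-level paging (LA57) is enabled at runtime, so TASK_SIZE_MAX is not a
 * simple compile-time constant here: the ALTERNATIVE patches in
 * (1 << 47) - 4096 for 4-level paging, or (1 << 56) - 4096 when
 * X86_FEATURE_LA57 is set.  Callers pass n = size - 1, so the cmp/jae that
 * follows rejects any access whose last byte would reach the limit or
 * beyond, and the sbb/and pair masks the address against speculative
 * out-of-range loads (array_index_mask_nospec()).
 */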
	.text
SYM_FUNC_START(__get_user_1)
	LOAD_TASK_SIZE_MINUS_N(0)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
1:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

SYM_FUNC_START(__get_user_2)
	LOAD_TASK_SIZE_MINUS_N(1)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
2:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

SYM_FUNC_START(__get_user_4)
	LOAD_TASK_SIZE_MINUS_N(3)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
3:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

SYM_FUNC_START(__get_user_8)
#ifdef CONFIG_X86_64
	LOAD_TASK_SIZE_MINUS_N(7)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movq (%_ASM_AX),%rdx
	xor %eax,%eax
	ASM_CLAC
	ret
#else
	LOAD_TASK_SIZE_MINUS_N(7)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl (%_ASM_AX),%edx
5:	movl 4(%_ASM_AX),%ecx
	xor %eax,%eax
	ASM_CLAC
	ret
#endif
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)

/* .. and the same for __get_user, just without the range checks */
SYM_FUNC_START(__get_user_nocheck_1)
	ASM_STAC
	ASM_BARRIER_NOSPEC
6:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_1)
EXPORT_SYMBOL(__get_user_nocheck_1)

SYM_FUNC_START(__get_user_nocheck_2)
	ASM_STAC
	ASM_BARRIER_NOSPEC
7:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_2)
EXPORT_SYMBOL(__get_user_nocheck_2)

SYM_FUNC_START(__get_user_nocheck_4)
	ASM_STAC
	ASM_BARRIER_NOSPEC
8:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_4)
EXPORT_SYMBOL(__get_user_nocheck_4)

SYM_FUNC_START(__get_user_nocheck_8)
	ASM_STAC
	ASM_BARRIER_NOSPEC
#ifdef CONFIG_X86_64
9:	movq (%_ASM_AX),%rdx
#else
9:	movl (%_ASM_AX),%edx
10:	movl 4(%_ASM_AX),%ecx
#endif
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_8)
EXPORT_SYMBOL(__get_user_nocheck_8)


SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
	ASM_CLAC
bad_get_user:
	xor %edx,%edx
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_clac)

#ifdef CONFIG_X86_32
SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
	ASM_CLAC
bad_get_user_8:
	xor %edx,%edx
	xor %ecx,%ecx
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_8_clac)
#endif

/* get_user */
	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif

/* __get_user */
	_ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
#endif