/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>
/*
 * Speculation barrier for the unchecked variants: an LFENCE on CPUs where
 * LFENCE is serializing, nothing otherwise.
 */
#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC

#ifdef CONFIG_X86_5LEVEL
/*
 * Load (TASK_SIZE_MAX - n) into %rdx.  With CONFIG_X86_5LEVEL the limit
 * depends on whether LA57 is actually enabled at boot, so the immediate is
 * patched via ALTERNATIVE: 4-level (1 << 47) vs 5-level (1 << 56), minus
 * the guard page.
 */
#define LOAD_TASK_SIZE_MINUS_N(n) \
	ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
		    __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
#else
#define LOAD_TASK_SIZE_MINUS_N(n) \
	mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
#endif

	.text
/*
 * u8 variant: range-check the pointer in %[r|e]ax, then load one byte,
 * zero-extended, into %edx.  %[r|e]ax = 0 on success, -EFAULT on failure.
 */
SYM_FUNC_START(__get_user_1)
	LOAD_TASK_SIZE_MINUS_N(0)
	cmp %_ASM_DX,%_ASM_AX		/* CF=1 iff address is below the limit */
	jae .Lbad_get_user_clac		/* out of range -> -EFAULT */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address to 0 under misspeculation */
	ASM_STAC			/* open the user-access window (SMAP) */
1:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)
/*
 * u16 variant: range check covers address+1, then load two bytes,
 * zero-extended, into %edx.
 */
SYM_FUNC_START(__get_user_2)
	LOAD_TASK_SIZE_MINUS_N(1)
	cmp %_ASM_DX,%_ASM_AX		/* CF=1 iff the whole u16 fits below the limit */
	jae .Lbad_get_user_clac		/* out of range -> -EFAULT */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address to 0 under misspeculation */
	ASM_STAC			/* open the user-access window (SMAP) */
2:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)
/*
 * u32 variant: range check covers address+3, then load four bytes into
 * %edx (implicitly zero-extending %rdx on 64-bit).
 */
SYM_FUNC_START(__get_user_4)
	LOAD_TASK_SIZE_MINUS_N(3)
	cmp %_ASM_DX,%_ASM_AX		/* CF=1 iff the whole u32 fits below the limit */
	jae .Lbad_get_user_clac		/* out of range -> -EFAULT */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address to 0 under misspeculation */
	ASM_STAC			/* open the user-access window (SMAP) */
3:	movl (%_ASM_AX),%edx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)
/*
 * u64 variant.  On 64-bit a single movq fills %rdx; on 32-bit the value is
 * split across %edx (low half) and %ecx (high half) and a fault on either
 * load must clear both, hence the dedicated 8-byte error stub.
 */
SYM_FUNC_START(__get_user_8)
#ifdef CONFIG_X86_64
	LOAD_TASK_SIZE_MINUS_N(7)
	cmp %_ASM_DX,%_ASM_AX		/* CF=1 iff the whole u64 fits below the limit */
	jae .Lbad_get_user_clac		/* out of range -> -EFAULT */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address to 0 under misspeculation */
	ASM_STAC			/* open the user-access window (SMAP) */
4:	movq (%_ASM_AX),%rdx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
#else
	LOAD_TASK_SIZE_MINUS_N(7)
	cmp %_ASM_DX,%_ASM_AX
	jae .Lbad_get_user_8_clac	/* out of range -> -EFAULT, clear %edx:%ecx */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl (%_ASM_AX),%edx
5:	movl 4(%_ASM_AX),%ecx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
#endif
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)
/* .. and the same for __get_user, just without the range checks */
/*
 * Unchecked u8 variant: the caller has already validated the pointer, so
 * the speculation barrier stands in for the address-masking done in the
 * checking variants.
 */
SYM_FUNC_START(__get_user_nocheck_1)
	ASM_STAC			/* open the user-access window (SMAP) */
	ASM_BARRIER_NOSPEC
6:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_1)
EXPORT_SYMBOL(__get_user_nocheck_1)
/* Unchecked u16 variant: load two bytes, zero-extended, into %edx. */
SYM_FUNC_START(__get_user_nocheck_2)
	ASM_STAC			/* open the user-access window (SMAP) */
	ASM_BARRIER_NOSPEC
7:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_2)
EXPORT_SYMBOL(__get_user_nocheck_2)
/* Unchecked u32 variant: load four bytes into %edx. */
SYM_FUNC_START(__get_user_nocheck_4)
	ASM_STAC			/* open the user-access window (SMAP) */
	ASM_BARRIER_NOSPEC
8:	movl (%_ASM_AX),%edx
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_4)
EXPORT_SYMBOL(__get_user_nocheck_4)
/*
 * Unchecked u64 variant: one movq on 64-bit, two movl (%edx low half,
 * %ecx high half) on 32-bit.
 */
SYM_FUNC_START(__get_user_nocheck_8)
	ASM_STAC			/* open the user-access window (SMAP) */
	ASM_BARRIER_NOSPEC
#ifdef CONFIG_X86_64
9:	movq (%_ASM_AX),%rdx
#else
9:	movl (%_ASM_AX),%edx
10:	movl 4(%_ASM_AX),%ecx
#endif
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_8)
EXPORT_SYMBOL(__get_user_nocheck_8)
/*
 * Common error exit: return -EFAULT and clear the value register so no
 * stale data leaks to the caller.  ASM_CLAC is a harmless no-op if no
 * STAC was executed on this path (and on !SMAP CPUs it patches to NOPs).
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
	ASM_CLAC
	xor %edx,%edx			/* don't leak a stale value */
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_clac)
/*
 * Error exit for the 32-bit 8-byte case: both result halves (%edx low,
 * %ecx high) must be cleared.  Only referenced by !CONFIG_X86_64 code;
 * assembles harmlessly (dead) on 64-bit.
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
	ASM_CLAC
	xor %edx,%edx			/* clear low half */
	xor %ecx,%ecx			/* clear high half */
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_8_clac)
/*
 * Exception-table fixups: a #PF on any tagged user-space load above lands
 * in the matching error stub.  The 8-byte loads need the stub that also
 * clears %ecx on 32-bit, where the value is split across %edx:%ecx.
 */
	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif

	_ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
#endif