#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/lockdep.h>
#include <asm/alternative.h>
#include <asm/cpufeature.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);

static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

	alternative_call(copy_user_generic_unrolled,
			 copy_user_generic_string,
			 X86_FEATURE_REP_GOOD,
			 ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				     "=d" (len)),
			 "1" (to), "2" (from), "3" (len)
			 : "memory", "rcx", "r8", "r9", "r10", "r11");
	return ret;
}
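
/*
 * Note: alternative_call() patches the call site at boot.  CPUs that set
 * X86_FEATURE_REP_GOOD (a fast REP MOVSQ/MOVSB implementation) get the
 * string variant; everything else keeps the unrolled loop.  The register
 * outputs and clobbers above mirror the ad-hoc calling convention the
 * two variants share.
 */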

__must_check unsigned long
_copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
_copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
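
/*
 * Unlike the copy_user_generic*() helpers above, these out-of-line entry
 * points (implemented in arch/x86/lib) also validate the user pointer
 * against the address limit before copying.
 */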

static inline unsigned long __must_check copy_from_user(void *to,
					  const void __user *from,
					  unsigned long n)
{
	int sz = __compiletime_object_size(to);

	might_fault();
	if (likely(sz == -1 || sz >= n))
		n = _copy_from_user(to, from, n);
#ifdef CONFIG_DEBUG_VM
	else
		WARN(1, "Buffer overflow detected!\n");
#endif
	return n;
}
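
/*
 * Typical use (an illustrative sketch only, not part of this header;
 * "arg" and struct foo are hypothetical):
 *
 *	struct foo kbuf;
 *
 *	if (copy_from_user(&kbuf, arg, sizeof(kbuf)))
 *		return -EFAULT;
 *
 * A nonzero return value is the number of bytes that could NOT be
 * copied.
 */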

static __always_inline __must_check
int copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();

	return _copy_to_user(dst, src, size);
}
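
/*
 * The mirror-image pattern for writes (again a hypothetical sketch):
 *
 *	if (copy_to_user(arg, &kbuf, sizeof(kbuf)))
 *		return -EFAULT;
 */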

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
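
/*
 * For sizes known at compile time the switch above collapses to one or
 * two mov instructions via __get_user_asm(); any other size falls back
 * to copy_user_generic().  Callers must have validated the range with
 * access_ok() themselves -- the double-underscore variants skip that
 * check.
 */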

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "er", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
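
/*
 * Same structure as __copy_from_user().  The asm("":::"memory") between
 * the split 10- and 16-byte stores is a compiler-level barrier only: it
 * keeps the compiler from reordering memory accesses across the two
 * __put_user_asm() statements.  The "er" constraint permits a register
 * or a sign-extended 32-bit immediate, which is what 64-bit mov accepts.
 */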

static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
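
/*
 * __copy_in_user() moves data between two user buffers by bouncing each
 * constant-size chunk through a kernel temporary.  A plausible caller is
 * a compat layer repacking a structure in user memory (sketch only, with
 * hypothetical pointers u32_p and u64_p):
 *
 *	if (__copy_in_user(u64_p, u32_p, 4))
 *		return -EFAULT;
 */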

__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);
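
/*
 * Example for the string helpers (illustrative; "uname" is a
 * hypothetical user pointer):
 *
 *	char name[32];
 *	long len = strncpy_from_user(name, uname, sizeof(name));
 *
 *	if (len < 0)
 *		return len;
 *
 * strncpy_from_user() returns the string length copied (not counting
 * the terminating NUL) or a negative error such as -EFAULT.
 */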

static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	return copy_user_generic(dst, (__force const void *)src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
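
/*
 * The _inatomic variants omit might_fault() and may be called with
 * pagefaults disabled (e.g. between kmap_atomic()/kunmap_atomic()).  A
 * fault then fails the copy instead of sleeping, and the caller must
 * cope with the resulting short copy.
 */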

extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
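
/*
 * The _nocache variants use non-temporal stores so that a large one-off
 * copy does not evict useful data from the CPU caches.  The zerorest
 * argument selects whether the remaining destination bytes are zeroed
 * when a fault cuts the copy short.
 */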

unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);
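
/*
 * copy_user_handle_tail() is the common fixup path: after a fault in one
 * of the copy routines it retries the remainder byte by byte and returns
 * the number of bytes that could not be copied.
 */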

#endif /* _ASM_X86_UACCESS_64_H */