/* SPDX-License-Identifier: GPL-2.0 */
/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002  Niibe Yutaka
 * Copyright (C) 2003 - 2008  Paul Mundt
 *
 * Based on:
 *     MIPS implementation version 1.15 by
 *              Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H
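
/*
 * __get_user_size() dispatches a user-space load on the access size:
 * 1-, 2- and 4-byte accesses use a single mov.{b,w,l}, 8-byte accesses
 * go through the two-load __get_user_u64() helper, and any other size
 * falls through to __get_user_unknown(), which is never defined and so
 * shows up as a link-time error.
 */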
#define __get_user_size(x,ptr,size,retval)			\
do {								\
	retval = 0;						\
	switch (size) {						\
	case 1: __get_user_asm(x, ptr, retval, "b"); break;	\
	case 2: __get_user_asm(x, ptr, retval, "w"); break;	\
	case 4: __get_user_asm(x, ptr, retval, "l"); break;	\
	case 8: __get_user_u64(x, ptr, retval); break;		\
	default: __get_user_unknown(); break;			\
	}							\
} while (0)
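
/*
 * With an MMU the load may fault.  Each access therefore carries a
 * .fixup fragment and an __ex_table entry: if the mov at label 1
 * faults, the exception handler looks up the faulting address, jumps
 * to the fixup code at label 3, which zeroes the destination, loads
 * -EFAULT into the error register and resumes after the access.
 */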
#ifdef CONFIG_MMU
#define __get_user_asm(x, addr, err, insn) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov." insn "	%2, %1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov	#0, %1\n\t" \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%1, %0\n\t"		\
		: "=&r" (x)				\
		: "m" (__m(addr)));			\
	err = 0;					\
} while (0)
#endif /* CONFIG_MMU */
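
/* Never defined; referencing it makes bad __get_user_size() calls fail at link time. */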
extern void __get_user_unknown(void);
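
/*
 * 64-bit gets are done as two 32-bit loads.  The %R and %S operand
 * modifiers select the low and high word of the 64-bit destination
 * pair and %T addresses the second word of the source, so the little-
 * and big-endian variants differ only in which half each word lands
 * in.  Both loads get an __ex_table entry ("1b" covers the first
 * mov.l, "1b + 2" the second, since SH instructions are 2 bytes).
 */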
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __get_user_u64(x, addr, err) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%2,%R1\n\t" \
	"mov.l	%T2,%S1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov	#0,%R1\n\t" \
	"mov	#0,%S1\n\t" \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".long	1b + 2, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_u64(x, addr, err) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%2,%S1\n\t" \
	"mov.l	%T2,%R1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov	#0,%S1\n\t" \
	"mov	#0,%R1\n\t" \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".long	1b + 2, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#endif
#define __put_user_size(x,ptr,size,retval)			\
do {								\
	retval = 0;						\
	switch (size) {						\
	case 1: __put_user_asm(x, ptr, retval, "b"); break;	\
	case 2: __put_user_asm(x, ptr, retval, "w"); break;	\
	case 4: __put_user_asm(x, ptr, retval, "l"); break;	\
	case 8: __put_user_u64(x, ptr, retval); break;		\
	default: __put_user_unknown(); break;			\
	}							\
} while (0)
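
/*
 * Store variant of the faulting access.  On a fault the fixup code
 * only has to load -EFAULT into the error register (there is no
 * destination register to clear) before resuming after the store.
 * The "memory" clobber keeps the compiler from reordering the store
 * against surrounding accesses.
 */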
#ifdef CONFIG_MMU
#define __put_user_asm(x, addr, err, insn)			\
do {								\
	__asm__ __volatile__ (					\
		"1:\n\t"					\
		"mov." insn "	%1, %2\n\t"			\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f, %0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3, %0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=&r" (err)					\
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT),	\
		  "0" (err)					\
		: "memory");					\
} while (0)
#else
#define __put_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%0, %1\n\t"		\
		: /* no outputs */			\
		: "r" (x), "m" (__m(addr))		\
		: "memory");				\
	err = 0;					\
} while (0)
#endif /* CONFIG_MMU */
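
/*
 * 64-bit puts are two 32-bit stores.  As in __get_user_u64(), %R/%S
 * pick the low/high word of the 64-bit source and %T addresses the
 * second word of the destination, so the little- and big-endian
 * variants differ only in the order the halves are written.
 */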
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __put_user_u64(val,addr,retval)			\
({							\
	__asm__ __volatile__(				\
		"1:\n\t"				\
		"mov.l	%R1,%2\n\t"			\
		"mov.l	%S1,%T2\n\t"			\
		"2:\n"					\
		".section	.fixup,\"ax\"\n"	\
		"3:\n\t"				\
		"mov.l	4f,%0\n\t"			\
		"jmp	@%0\n\t"			\
		" mov	%3,%0\n\t"			\
		".balign	4\n"			\
		"4:	.long	2b\n\t"			\
		".previous\n"				\
		".section	__ex_table,\"a\"\n\t"	\
		".long	1b, 3b\n\t"			\
		".previous"				\
		: "=r" (retval)				\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#else
#define __put_user_u64(val,addr,retval)			\
({							\
	__asm__ __volatile__(				\
		"1:\n\t"				\
		"mov.l	%S1,%2\n\t"			\
		"mov.l	%R1,%T2\n\t"			\
		"2:\n"					\
		".section	.fixup,\"ax\"\n"	\
		"3:\n\t"				\
		"mov.l	4f,%0\n\t"			\
		"jmp	@%0\n\t"			\
		" mov	%3,%0\n\t"			\
		".balign	4\n"			\
		"4:	.long	2b\n\t"			\
		".previous\n"				\
		".section	__ex_table,\"a\"\n\t"	\
		".long	1b, 3b\n\t"			\
		".previous"				\
		: "=r" (retval)				\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#endif
extern void __put_user_unknown(void);
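
/*
 * Usage sketch (illustrative only, not the exact wrapper definitions):
 * this header is pulled in by <asm/uaccess.h>, which supplies the
 * __m() helper used above and the generic get_user()/put_user() entry
 * points.  A caller-level expansion looks roughly like:
 *
 *	long err;
 *	unsigned long val;
 *
 *	__get_user_size(val, uptr, sizeof(*uptr), err);
 *	if (err)
 *		return -EFAULT;
 *
 * i.e. val receives the user-space datum (or 0 after a fault) and err
 * is 0 on success or -EFAULT if the access faulted.
 */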
#endif /* __ASM_SH_UACCESS_32_H */