/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H
#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
#include <asm/smap.h>
/*
 * Atomically run INSN once against the user word *uaddr with OPARG,
 * returning the previous value through *oval. On a fault the exception
 * table routes 1: to the fixup at 3:, which loads -EFAULT into ret and
 * resumes at 2:; a non-zero ret then jumps to the caller-supplied LABEL.
 * Must run inside a user_access_begin()/user_access_end() section.
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
/*
 * Atomically apply INSN (a read-modify-write on a temporary) to the user
 * word *uaddr via a load / modify / LOCK cmpxchg retry loop, returning
 * the previous value through *oval. Faults at the load (1:) or the
 * cmpxchg (3:) are routed to the fixup at 5:, which sets ret = -EFAULT
 * and resumes at 4:; non-zero ret jumps to LABEL. A failed cmpxchg
 * (another writer raced us) retries from 2:. Must run inside a
 * user_access_begin()/user_access_end() section.
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl	%2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "5:\tmov\t%5, %1\n"			\
		     "\tjmp\t4b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 5b)			\
		     _ASM_EXTABLE_UA(3b, 5b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
56 static __always_inline
int arch_futex_atomic_op_inuser(int op
, int oparg
, int *oval
,
59 if (!user_access_begin(uaddr
, sizeof(u32
)))
64 unsafe_atomic_op1("xchgl %0, %2", oval
, uaddr
, oparg
, Efault
);
67 unsafe_atomic_op1(LOCK_PREFIX
"xaddl %0, %2", oval
,
68 uaddr
, oparg
, Efault
);
71 unsafe_atomic_op2("orl %4, %3", oval
, uaddr
, oparg
, Efault
);
74 unsafe_atomic_op2("andl %4, %3", oval
, uaddr
, ~oparg
, Efault
);
77 unsafe_atomic_op2("xorl %4, %3", oval
, uaddr
, oparg
, Efault
);
90 static inline int futex_atomic_cmpxchg_inatomic(u32
*uval
, u32 __user
*uaddr
,
91 u32 oldval
, u32 newval
)
95 if (!user_access_begin(uaddr
, sizeof(u32
)))
98 "1:\t" LOCK_PREFIX
"cmpxchgl %4, %2\n"
100 "\t.section .fixup, \"ax\"\n"
104 _ASM_EXTABLE_UA(1b
, 3b
)
105 : "+r" (ret
), "=a" (oldval
), "+m" (*uaddr
)
106 : "i" (-EFAULT
), "r" (newval
), "1" (oldval
)
#endif /* _ASM_X86_FUTEX_H */