#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
#include <asm/smap.h>
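
/*
 * __futex_atomic_op1() applies a single atomic instruction ("insn") to the
 * user-space word *uaddr and returns the previous value in "oldval".
 * ASM_STAC/ASM_CLAC bracket the user access for SMAP, and the exception
 * table entry sends a faulting access to the fixup code, which stores
 * -EFAULT in "ret".
 */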
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\t" insn "\n"				\
		     "2:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0))
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\tmovl\t%2, %0\n"			\
		     "\tmovl\t%0, %3\n"				\
		     "\t" insn "\n"				\
		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t1b\n"				\
		     "3:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "4:\tmov\t%5, %1\n"			\
		     "\tjmp\t3b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 4b)			\
		     _ASM_EXTABLE(2b, 4b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0))
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tem;
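
	/* FUTEX_OP_OPARG_SHIFT means the operand is really "1 << oparg". */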
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();
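
	/*
	 * Page faults are disabled here, so a fault in the asm blocks
	 * cannot sleep; the fixup code turns it into -EFAULT instead.
	 */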
62 __futex_atomic_op1("xchgl %0, %2", ret
, oldval
, uaddr
, oparg
);
	case FUTEX_OP_ADD:
		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
				   uaddr, oparg);
		break;
69 __futex_atomic_op2("orl %4, %3", ret
, oldval
, uaddr
, oparg
);
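	/* FUTEX_OP_ANDN is expressed as AND with the complemented operand. */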
72 __futex_atomic_op2("andl %4, %3", ret
, oldval
, uaddr
, ~oparg
);
75 __futex_atomic_op2("xorl %4, %3", ret
, oldval
, uaddr
, oparg
);
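
	/* If the update succeeded, report the requested comparison result. */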
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}
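
/*
 * Compare-and-exchange *uaddr from oldval to newval, storing the value
 * actually found in *uval.  On x86 this simply delegates to
 * user_atomic_cmpxchg_inatomic() from <asm/uaccess.h>.
 */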
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
}

#endif /* __KERNEL__ */
#endif /* _ASM_X86_FUTEX_H */