#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
/*
 * Perform a single-instruction atomic futex op (e.g. xchg4) on the user
 * word at @uaddr, leaving the previous value in @oldval and the fault
 * status in @ret.
 *
 * r8 is pinned because the exception fixup attached via the __ex_table
 * entry reports a faulting user access there: r8 stays 0 on success and
 * holds the error code after a fixed-up fault, which we copy into @ret.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	__asm__ __volatile__(						\
		"	mf;;					\n"	\
		"[1:] "	insn ";;				\n"	\
		"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"	\
		"[2:]"							\
		: "+r" (r8), "=r" (oldval)				\
		: "r" (uaddr), "r" (oparg)				\
		: "memory");						\
	ret = r8;							\
} while (0)
/*
 * Read-modify-write a futex word when the update needs a separate ALU
 * instruction: load the current value (%3), remember it (%2), apply
 * @insn to compute the new value, then try to install it with
 * cmpxchg4.acq against the remembered value in ar.ccv.  Retry while the
 * word changed underneath us (val != oldval) and no fault occurred.
 * @oldval receives the value observed before the successful update and
 * @ret the fault status from r8 (0, or the fixup's error code — see
 * __futex_atomic_op1 for the r8/__ex_table convention).
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	int val, newval;						\
	do {								\
		__asm__ __volatile__(					\
			"	mf;;				  \n"	\
			"[1:]	ld4 %3=[%4];;			  \n"	\
			"	mov %2=%3			  \n"	\
			insn	";;				  \n"	\
			"	mov ar.ccv=%2;;			  \n"	\
			"[2:]	cmpxchg4.acq %1=[%4],%3,ar.ccv;;  \n"	\
			"	.xdata4 \"__ex_table\", 1b-., 3f-.\n"	\
			"	.xdata4 \"__ex_table\", 2b-., 3f-.\n"	\
			"[3:]"						\
			: "+r" (r8), "=r" (val), "=&r" (oldval),	\
			   "=&r" (newval)				\
			: "r" (uaddr), "r" (oparg)			\
			: "memory");					\
		if (unlikely (r8))					\
			break;						\
	} while (unlikely (val != oldval));				\
	ret = r8;							\
} while (0)
48 arch_futex_atomic_op_inuser(int op
, int oparg
, int *oval
, u32 __user
*uaddr
)
56 __futex_atomic_op1("xchg4 %1=[%2],%3", ret
, oldval
, uaddr
,
60 __futex_atomic_op2("add %3=%3,%5", ret
, oldval
, uaddr
, oparg
);
63 __futex_atomic_op2("or %3=%3,%5", ret
, oldval
, uaddr
, oparg
);
66 __futex_atomic_op2("and %3=%3,%5", ret
, oldval
, uaddr
,
70 __futex_atomic_op2("xor %3=%3,%5", ret
, oldval
, uaddr
, oparg
);
85 futex_atomic_cmpxchg_inatomic(u32
*uval
, u32 __user
*uaddr
,
86 u32 oldval
, u32 newval
)
88 if (!access_ok(VERIFY_WRITE
, uaddr
, sizeof(u32
)))
92 register unsigned long r8
__asm ("r8") = 0;
97 "[1:] cmpxchg4.acq %1=[%2],%3,ar.ccv \n"
98 " .xdata4 \"__ex_table\", 1b-., 2f-. \n"
100 : "+r" (r8
), "=&r" (prev
)
101 : "r" (uaddr
), "r" (newval
),
102 "rO" ((long) (unsigned) oldval
)
#endif /* _ASM_FUTEX_H */