#ifndef __ASM_OPENRISC_FUTEX_H
#define __ASM_OPENRISC_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

/*
 * Run "insn" on *uaddr inside an l.lwa/l.swa (load-linked/store-conditional)
 * retry loop.  On success ret is 0 and oldval holds the value previously at
 * *uaddr; a fault in either user access jumps to the .fixup stub, which sets
 * ret to -EFAULT.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
({								\
	__asm__ __volatile__ (					\
		"1:	l.lwa	%0, %2		\n"		\
			insn				"\n"	\
		"2:	l.swa	%2, %1		\n"		\
		"	l.bnf	1b		\n"		\
		"	 l.ori	%1, r0, 0	\n"		\
		"3:				\n"		\
		".section .fixup,\"ax\"		\n"		\
		"4:	l.j	3b		\n"		\
		"	 l.addi	%1, r0, %3	\n"		\
		".previous			\n"		\
		".section __ex_table,\"a\"	\n"		\
		".word	1b,4b,2b,4b		\n"		\
		".previous			\n"		\
		: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr)	\
		: "i" (-EFAULT), "r" (oparg)			\
		: "cc", "memory"				\
		);						\
})
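
/*
 * Illustrative expansion (a sketch, not compiler output): for FUTEX_OP_ADD
 * the macro body becomes roughly
 *
 *	1:	l.lwa	%0, %2		// oldval = *uaddr, link the address
 *		l.add	%1, %0, %4	// ret = oldval + oparg
 *	2:	l.swa	%2, %1		// try *uaddr = ret; fails if link lost
 *		l.bnf	1b		// store failed -> reload and retry
 *		 l.ori	%1, r0, 0	// delay slot: ret = 0 on the success path
 *
 * with the __ex_table entries mapping a fault at 1: or 2: to the fixup code
 * that sets ret = -EFAULT.
 */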

static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("l.or %1,%4,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("l.add %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("l.or %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("l.and %1,%0,%4", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("l.xor %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}
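
/*
 * Worked example (for illustration only): FUTEX_OP(FUTEX_OP_ADD, 1,
 * FUTEX_OP_CMP_GT, 0) packs to 0x14001000, which the decode above splits
 * into op = 1 (ADD), cmp = 4 (GT), oparg = 1, cmparg = 0: atomically add 1
 * to *uaddr and report whether the old value was greater than 0 (the usual
 * FUTEX_WAKE_OP pattern).  The paired left/right shifts deliberately
 * sign-extend the 12-bit oparg and cmparg fields.
 */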

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 prev;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * cmpxchg via l.lwa/l.swa: load *uaddr, compare it with oldval and
	 * bail out to 3: if it differs, otherwise attempt to store newval
	 * and retry from 1: if the reservation was lost.  A fault on either
	 * user access lands in the fixup stub, which sets ret = -EFAULT.
	 */
	__asm__ __volatile__ (				\
		"1:	l.lwa	%1, %2		\n"	\
		"	l.sfeq	%1, %3		\n"	\
		"	l.bnf	3f		\n"	\
		"	 l.nop			\n"	\
		"2:	l.swa	%2, %4		\n"	\
		"	l.bnf	1b		\n"	\
		"	 l.nop			\n"	\
		"3:				\n"	\
		".section .fixup,\"ax\"		\n"	\
		"4:	l.j	3b		\n"	\
		"	 l.addi	%0, r0, %5	\n"	\
		".previous			\n"	\
		".section __ex_table,\"a\"	\n"	\
		".word	1b,4b,2b,4b		\n"	\
		".previous			\n"	\
		: "+r" (ret), "=&r" (prev), "+m" (*uaddr)	\
		: "r" (oldval), "r" (newval), "i" (-EFAULT)	\
		: "cc", "memory"			\
		);

	*uval = prev;
	return ret;
}
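
/*
 * Usage sketch (hypothetical caller, names illustrative): the generic futex
 * code invokes this helper roughly as
 *
 *	u32 curval;
 *	if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, 0, tid))
 *		return -EFAULT;		// fault while touching *uaddr
 *	if (curval != 0)
 *		;			// contended: curval is the observed value
 *
 * i.e. the return value only reports faults; whether the exchange happened
 * is judged by comparing *uval against the expected oldval.
 */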

#endif /* __KERNEL__ */

#endif /* __ASM_OPENRISC_FUTEX_H */