#ifndef _ASM_MICROBLAZE_FUTEX_H
#define _ASM_MICROBLAZE_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
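/*
 * __futex_atomic_op() performs an atomic read-modify-write on the user
 * word at uaddr using MicroBlaze exclusive accesses: "lwx" loads the
 * old value (%0) and sets the reservation, "insn" computes the new
 * value in %1, and "swx" stores it only while the reservation still
 * holds; "addic"/"bnei" copy the carry flag and retry from label 1
 * when the store fails. Faults at labels 1 and 2 are routed through
 * the __ex_table entries to the fixup at label 4, which resumes at
 * label 3 with -EFAULT in "ret".
 */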
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
({									\
	__asm__ __volatile__ (						\
			"1:	lwx	%0, %2, r0; "			\
				insn					\
			"2:	swx	%1, %2, r0;			\
				addic	%1, r0, 0;			\
				bnei	%1, 1b;				\
			3:						\
			.section .fixup,\"ax\";				\
			4:	brid	3b;				\
				addik	%1, r0, %3;			\
			.previous;					\
			.section __ex_table,\"a\";			\
			.word	1b,4b,2b,4b;				\
			.previous;"					\
26 : "=&r" (oldval), "=&r" (ret) \
27 : "r" (uaddr), "i" (-EFAULT), "r" (oparg) \
static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();
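	/*
	 * Each case passes the single instruction that computes the
	 * new value in %1 from the old value in %0 and oparg in %4.
	 * FUTEX_OP_SET ignores the old value, so "or %1,%4,%4" simply
	 * copies oparg.
	 */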
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("or %1,%4,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("andn %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();
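	/*
	 * On success, "oldval" holds the value the operation replaced;
	 * fold the requested comparison into the return value.
	 */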
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}
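/*
 * Atomically compare-and-exchange the user word at uaddr: replace it
 * with newval only if it currently equals oldval. The value actually
 * read is returned through *uval; the return value is 0 or -EFAULT.
 */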
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;
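	/*
	 * "lwx" loads the current value and sets the reservation; if it
	 * differs from oldval we skip to label 3, otherwise "swx" tries
	 * to store newval, retrying from label 1 when the reservation
	 * was lost. Faults at labels 1 and 2 branch to the fixup at
	 * label 4, which sets ret to -EFAULT.
	 */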
	__asm__ __volatile__ ("1:	lwx	%1, %3, r0;		\
		cmp	%2, %1, %4;				\
		bnei	%2, 3f;					\
		2:	swx	%5, %3, r0;			\
		addic	%2, r0, 0;				\
		bnei	%2, 1b;					\
		3:						\
		.section .fixup,\"ax\";				\
		4:	brid	3b;				\
			addik	%0, r0, %6;			\
		.previous;					\
		.section __ex_table,\"a\";			\
		.word	1b,4b,2b,4b;				\
		.previous;"					\
120 : "+r" (ret
), "=&r" (prev
), "=&r"(cmp
) \
121 : "r" (uaddr
), "r" (oldval
), "r" (newval
), "i" (-EFAULT
));
#endif /* __KERNEL__ */

#endif