/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <baruch@tkos.co.il>
 */
#ifndef _ASM_XTENSA_FUTEX_H
#define _ASM_XTENSA_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>
/*
 * Perform one atomic read-modify-write futex op on a user-space word.
 *
 * insn   - assembly that computes the new value into %1 from the old
 *          value in %0 and the operand in %4
 * ret    - set to 0 on success, -EFAULT on a faulting user access
 * oldval - receives the value read from *uaddr before the update
 * uaddr  - user-space address of the futex word
 * oparg  - operand for the operation
 *
 * Uses the S32C1I compare-and-store instruction: the old value is loaded,
 * placed in SCOMPARE1, and s32c1i stores the new value only if the word
 * still matches; on mismatch we retry from label 1.  Faults on the load
 * or store are redirected through the .fixup/__ex_table machinery to
 * label 5, which returns -EFAULT in ret.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile__(				\
	"1:	l32i	%0, %2, 0\n"			\
		insn "\n"				\
	"	wsr	%0, scompare1\n"		\
	"2:	s32c1i	%1, %2, 0\n"			\
	"	bne	%1, %0, 1b\n"			\
	"	movi	%1, 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"4:	.long	3b\n"				\
	"5:	l32r	%0, 4b\n"			\
	"	movi	%1, %3\n"			\
	"	jx	%0\n"				\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: "=&r" (oldval), "=&r" (ret)			\
	: "r" (uaddr), "I" (-EFAULT), "r" (oparg)	\
	: "memory")
47 static inline int arch_futex_atomic_op_inuser(int op
, int oparg
, int *oval
,
52 #if !XCHAL_HAVE_S32C1I
60 __futex_atomic_op("mov %1, %4", ret
, oldval
, uaddr
, oparg
);
63 __futex_atomic_op("add %1, %0, %4", ret
, oldval
, uaddr
,
67 __futex_atomic_op("or %1, %0, %4", ret
, oldval
, uaddr
,
71 __futex_atomic_op("and %1, %0, %4", ret
, oldval
, uaddr
,
75 __futex_atomic_op("xor %1, %0, %4", ret
, oldval
, uaddr
,
91 futex_atomic_cmpxchg_inatomic(u32
*uval
, u32 __user
*uaddr
,
92 u32 oldval
, u32 newval
)
96 if (!access_ok(VERIFY_WRITE
, uaddr
, sizeof(u32
)))
99 #if !XCHAL_HAVE_S32C1I
103 __asm__
__volatile__ (
104 " # futex_atomic_cmpxchg_inatomic\n"
105 " wsr %5, scompare1\n"
106 "1: s32c1i %1, %4, 0\n"
109 " .section .fixup,\"ax\"\n"
116 " .section __ex_table,\"a\"\n"
119 : "+r" (ret
), "+r" (newval
), "+m" (*uaddr
), "+m" (*uval
)
120 : "r" (uaddr
), "r" (oldval
), "r" (uval
), "I" (-EFAULT
)
#endif /* __KERNEL__ */
#endif /* _ASM_XTENSA_FUTEX_H */