/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <baruch@tkos.co.il>
 */

#ifndef _ASM_XTENSA_FUTEX_H
#define _ASM_XTENSA_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>

#define arch_futex_atomic_op_inuser arch_futex_atomic_op_inuser
#define futex_atomic_cmpxchg_inatomic futex_atomic_cmpxchg_inatomic
#include <asm-generic/futex.h>
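
/*
 * Note: defining the two names above before pulling in <asm-generic/futex.h>
 * keeps the generic header from installing its own versions of
 * arch_futex_atomic_op_inuser() and futex_atomic_cmpxchg_inatomic(), while
 * still making the *_local() fallbacks used below available; see
 * asm-generic/futex.h for the exact mechanism.
 */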

#if XCHAL_HAVE_EXCLUSIVE
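/*
 * Exclusive-access variant: l32ex/s32ex form a retry loop. "insn" computes
 * the new value from the loaded old value, s32ex attempts the store, and
 * getex fetches the store-exclusive result so that beqz can retry when
 * another agent touched the word in between. A fault in either user access
 * jumps to the fixup code at 5:, which returns -EFAULT in "ret".
 */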
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile__(				\
	"1:	l32ex	%[oldval], %[addr]\n"		\
		insn "\n"				\
	"2:	s32ex	%[newval], %[addr]\n"		\
	"	getex	%[newval]\n"			\
	"	beqz	%[newval], 1b\n"		\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret)	\
	: [addr] "r" (uaddr), [oparg] "r" (arg),	\
	  [fault] "I" (-EFAULT)				\
	: "memory")
#elif XCHAL_HAVE_S32C1I
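/*
 * Compare-and-swap variant: s32c1i stores the new value only if the word
 * still matches the SCOMPARE1 special register (loaded via wsr) and always
 * returns the value it observed, so bne can restart the sequence when the
 * word changed between the l32i and the s32c1i.
 */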
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile__(				\
	"1:	l32i	%[oldval], %[mem]\n"		\
		insn "\n"				\
	"	wsr	%[oldval], scompare1\n"		\
	"2:	s32c1i	%[newval], %[mem]\n"		\
	"	bne	%[newval], %[oldval], 1b\n"	\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret),	\
	  [mem] "+m" (*(uaddr))				\
	: [oparg] "r" (arg), [fault] "I" (-EFAULT)	\
	: "memory")
#endif
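
/*
 * Either way, a successful __futex_atomic_op() leaves the previous value
 * of *uaddr in "old" and 0 in "ret"; on a user-access fault the fixup
 * path sets "ret" to -EFAULT instead.
 */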
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
					      u32 __user *uaddr)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %[newval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
88 __futex_atomic_op("add %[newval], %[oldval], %[oparg]",
89 ret
, oldval
, uaddr
, oparg
);
92 __futex_atomic_op("or %[newval], %[oldval], %[oparg]",
93 ret
, oldval
, uaddr
, oparg
);
96 __futex_atomic_op("and %[newval], %[oldval], %[oparg]",
97 ret
, oldval
, uaddr
, ~oparg
);
100 __futex_atomic_op("xor %[newval], %[oldval], %[oparg]",
101 ret
, oldval
, uaddr
, oparg
);
112 return futex_atomic_op_inuser_local(op
, oparg
, oval
, uaddr
);
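
/*
 * Atomically compare *uaddr with oldval and, on a match, replace it with
 * newval; the value actually found at *uaddr is stored through uval so the
 * caller can detect a lost race. Returns 0 or -EFAULT.
 */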
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	unsigned long tmp;
	int ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
	"	# futex_atomic_cmpxchg_inatomic\n"
#if XCHAL_HAVE_EXCLUSIVE
	"1:	l32ex	%[tmp], %[addr]\n"
	"	s32i	%[tmp], %[uval], 0\n"
	"	bne	%[tmp], %[oldval], 2f\n"
	"	mov	%[tmp], %[newval]\n"
	"3:	s32ex	%[tmp], %[addr]\n"
	"	getex	%[tmp]\n"
	"	beqz	%[tmp], 1b\n"
#elif XCHAL_HAVE_S32C1I
	"	wsr	%[oldval], scompare1\n"
	"1:	s32c1i	%[newval], %[addr], 0\n"
	"	s32i	%[newval], %[uval], 0\n"
#endif
	"2:\n"
143 " .section .fixup,\"ax\"\n"
145 " .literal_position\n"
146 "4: movi %[tmp], 2b\n"
147 " movi %[ret], %[fault]\n"
150 " .section __ex_table,\"a\"\n"
152 #if XCHAL_HAVE_EXCLUSIVE
	: [ret] "+r" (ret), [newval] "+r" (newval), [tmp] "=&r" (tmp)
	: [addr] "r" (uaddr), [oldval] "r" (oldval), [uval] "r" (uval),
	  [fault] "I" (-EFAULT)
	: "memory");

	return ret;
#else
	return futex_atomic_cmpxchg_inatomic_local(uval, uaddr, oldval, newval);
#endif
}

#endif /* _ASM_XTENSA_FUTEX_H */