Linux 5.7.6: arch/xtensa/include/asm/futex.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <baruch@tkos.co.il>
 */

#ifndef _ASM_XTENSA_FUTEX_H
#define _ASM_XTENSA_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>
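
/*
 * Futex operations are implemented with whichever atomic primitive the
 * configured core provides: exclusive load/store (l32ex/s32ex) when
 * XCHAL_HAVE_EXCLUSIVE is set, or compare-and-swap (s32c1i) when
 * XCHAL_HAVE_S32C1I is set.  Cores with neither option report -ENOSYS.
 *
 * __futex_atomic_op() loads the old value from the user word, lets the
 * caller-supplied instruction compute the new value, and stores it back
 * atomically, retrying until the update succeeds without interference.
 * A faulting user access is redirected through the .fixup/__ex_table
 * entries, which leave -EFAULT in 'ret'.
 */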
#if XCHAL_HAVE_EXCLUSIVE
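/*
 * Exclusive-access variant: l32ex loads the word and marks it for
 * exclusive access, s32ex attempts the store, getex reads back the
 * store result, and the loop restarts from the load whenever that
 * result is zero (the store did not take effect).
 */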
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile(				\
	"1:	l32ex	%[oldval], %[addr]\n"		\
		insn "\n"				\
	"2:	s32ex	%[newval], %[addr]\n"		\
	"	getex	%[newval]\n"			\
	"	beqz	%[newval], 1b\n"		\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret)	\
	: [addr] "r" (uaddr), [oparg] "r" (arg),	\
	  [fault] "I" (-EFAULT)				\
	: "memory")
#elif XCHAL_HAVE_S32C1I
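/*
 * Compare-and-swap variant: the expected old value is placed in the
 * SCOMPARE1 special register; s32c1i stores the new value only if the
 * word still matches SCOMPARE1 and returns the value it observed, so a
 * mismatch restarts the sequence from the load.
 */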
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile(				\
	"1:	l32i	%[oldval], %[mem]\n"		\
		insn "\n"				\
	"	wsr	%[oldval], scompare1\n"		\
	"2:	s32c1i	%[newval], %[mem]\n"		\
	"	bne	%[newval], %[oldval], 1b\n"	\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret),	\
	  [mem] "+m" (*(uaddr))				\
	: [oparg] "r" (arg), [fault] "I" (-EFAULT)	\
	: "memory")
#endif
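
/*
 * Atomically apply the futex operation 'op' with operand 'oparg' to the
 * user-space word at 'uaddr', returning the word's previous value through
 * 'oval'.  Returns 0 on success, -EFAULT on a faulting access, and -ENOSYS
 * for unknown operations or cores without a suitable atomic primitive.
 */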
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;
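
	/*
	 * Hand the macro the ALU instruction that implements the requested
	 * operation; ANDN is expressed as AND with the complemented operand.
	 */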
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %[newval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
#else
	return -ENOSYS;
#endif
}
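
/*
 * Atomically compare the user-space word at 'uaddr' with 'oldval' and,
 * if they match, replace it with 'newval'.  The value actually observed
 * in the word is always stored through 'uval', so the caller can tell
 * whether the exchange took place.  Returns 0 when the user access
 * succeeds, -EFAULT when it faults, and -ENOSYS on cores without a
 * suitable atomic primitive.
 */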
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	unsigned long tmp;
	int ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;
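
	/*
	 * Both variants leave the word's observed value for the caller: the
	 * exclusive path stores the l32ex result to *uval before testing it
	 * against 'oldval', while the s32c1i path stores whatever s32c1i
	 * returned.  A fault in either path jumps to the fixup code, which
	 * sets 'ret' to -EFAULT and branches to the exit label (2:).
	 */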
	__asm__ __volatile__ (
	"	# futex_atomic_cmpxchg_inatomic\n"
#if XCHAL_HAVE_EXCLUSIVE
	"1:	l32ex	%[tmp], %[addr]\n"
	"	s32i	%[tmp], %[uval], 0\n"
	"	bne	%[tmp], %[oldval], 2f\n"
	"	mov	%[tmp], %[newval]\n"
	"3:	s32ex	%[tmp], %[addr]\n"
	"	getex	%[tmp]\n"
	"	beqz	%[tmp], 1b\n"
#elif XCHAL_HAVE_S32C1I
	"	wsr	%[oldval], scompare1\n"
	"1:	s32c1i	%[newval], %[addr], 0\n"
	"	s32i	%[newval], %[uval], 0\n"
#endif
	"2:\n"
	"	.section .fixup,\"ax\"\n"
	"	.align 4\n"
	"	.literal_position\n"
	"4:	movi	%[tmp], 2b\n"
	"	movi	%[ret], %[fault]\n"
	"	jx	%[tmp]\n"
	"	.previous\n"
	"	.section __ex_table,\"a\"\n"
	"	.long 1b, 4b\n"
#if XCHAL_HAVE_EXCLUSIVE
	"	.long 3b, 4b\n"
#endif
	"	.previous\n"
	: [ret] "+r" (ret), [newval] "+r" (newval), [tmp] "=&r" (tmp)
	: [addr] "r" (uaddr), [oldval] "r" (oldval), [uval] "r" (uval),
	  [fault] "I" (-EFAULT)
	: "memory");

	return ret;
#else
	return -ENOSYS;
#endif
}

#endif /* _ASM_XTENSA_FUTEX_H */