arch/alpha/include/asm/xchg.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barriers.
 * So this file is included twice from asm/cmpxchg.h.
 */
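/*
 * For illustration only (a sketch, not the verbatim contents of
 * asm/cmpxchg.h): the double inclusion works by defining ____xchg() and
 * ____cmpxchg() as token-pasting macros before each #include, so that
 * ____xchg(_u8, ...) below expands into either the _local or the
 * fully-ordered function name.  The exact macro bodies shown here are a
 * simplified assumption.
 *
 *	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)		__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>
 */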
/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */
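/*
 * Illustrative use only (hypothetical lock variable and helpers, not part
 * of this header): because xchg() is fully ordered, a caller can build a
 * simple test-and-set lock directly on top of it.
 *
 *	static unsigned long my_lock;		// 0 = free, 1 = held
 *
 *	static void my_lock_acquire(void)
 *	{
 *		// Atomically swap in 1; we own the lock once we read back 0.
 *		while (xchg(&my_lock, 1) != 0)
 *			cpu_relax();
 *	}
 *
 *	static void my_lock_release(void)
 *	{
 *		smp_store_release(&my_lock, 0);
 *	}
 */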
static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	/*
	 * There is no byte-wide load-locked/store-conditional, so work on
	 * the aligned quadword containing the byte: extract the old byte,
	 * splice in the new one, and retry if the conditional store fails.
	 */
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");
	smp_mb();

	return ret;
}
static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");
	smp_mb();

	return ret;
}
static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");
	smp_mb();

	return val;
}
static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");
	smp_mb();

	return val;
}
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
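/*
 * Illustrative only (hypothetical caller, not part of this header): the
 * "size" argument is always sizeof(*ptr), a compile-time constant, so the
 * switch above folds to a single case and the bad-pointer call is
 * optimized away.  An unsupported size leaves the call in place and the
 * build fails at link time with an undefined reference.
 *
 *	u64 counter;
 *	u64 prev = xchg(&counter, 0);	// sizeof == 8: uses ____xchg(_u64, ...)
 *
 *	u8 flag;
 *	u8 was = xchg(&flag, 1);	// sizeof == 1: uses ____xchg(_u8, ...)
 */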
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 *
 * The trailing memory barrier is placed in SMP unconditionally, in
 * order to guarantee that dependency ordering is preserved when a
 * dependency is headed by an unsuccessful operation.
 */
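/*
 * Illustrative use only (hypothetical helper, not part of this header):
 * comparing the return value with "old" is how callers detect success,
 * which gives the usual read-modify-write retry loop:
 *
 *	static void my_atomic_or(unsigned long *p, unsigned long bits)
 *	{
 *		unsigned long old, new;
 *
 *		do {
 *			old = READ_ONCE(*p);
 *			new = old | bits;
 *		} while (cmpxchg(p, old, new) != old);	// retry if *p changed
 *	}
 */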
static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	/*
	 * As with ____xchg(_u8), operate on the containing quadword; if the
	 * current byte does not equal "old", branch to 2: and store nothing.
	 */
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
	smp_mb();

	return prev;
}
static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
	smp_mb();

	return prev;
}
static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");
	smp_mb();

	return prev;
}
static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");
	smp_mb();

	return prev;
}
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	      int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}
#endif