1 /* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014 Regents of the University of California
 */
6 #ifndef _ASM_RISCV_CMPXCHG_H
7 #define _ASM_RISCV_CMPXCHG_H
#include <linux/bug.h>

#include <asm/barrier.h>
#include <asm/fence.h>
/*
 * Atomically swap *ptr with new and return the previous value, with no
 * ordering guarantees.  Dispatches on size to a 32- or 64-bit AMO; any
 * other size is rejected at build time.
 */
#define __xchg_relaxed(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public relaxed exchange: infers the width from the pointee type. */
#define xchg_relaxed(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})
/*
 * Exchange with ACQUIRE ordering: the AMO is followed by an acquire
 * barrier so later memory accesses cannot be reordered before the swap.
 */
#define __xchg_acquire(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public acquire-ordered exchange. */
#define xchg_acquire(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_acquire((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})
/*
 * Exchange with RELEASE ordering: a release barrier precedes the AMO so
 * earlier memory accesses cannot be reordered after the swap.
 */
#define __xchg_release(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public release-ordered exchange. */
#define xchg_release(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_release((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})
/*
 * Fully ordered exchange: the .aqrl AMO variant gives both acquire and
 * release semantics in a single instruction.
 */
#define __xchg(ptr, new, size)						\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public fully ordered exchange. */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg((ptr), _x_, sizeof(*(ptr)));	\
})
/* Width-checked helper: exchange that insists on a 32-bit object. */
#define xchg32(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	xchg((ptr), (x));						\
})
/* Width-checked helper: exchange that insists on a 64-bit object. */
#define xchg64(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	xchg((ptr), (x));						\
})
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
/*
 * Relaxed compare-and-exchange built from an LR/SC retry loop: reload
 * and retry whenever the store-conditional fails.  The "rJ" constraints
 * with %z let the assembler use the zero register for constant 0; the
 * 32-bit old value is sign-extended (cast to long) to match what lr.w
 * leaves in the destination register.
 */
#define __cmpxchg_relaxed(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public relaxed compare-and-exchange. */
#define cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})
/*
 * Compare-and-exchange with ACQUIRE ordering on success.  The acquire
 * barrier sits between the sc and label 1, so the comparison-failure
 * path (bne to 1f) skips it.
 */
#define __cmpxchg_acquire(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public acquire-ordered compare-and-exchange. */
#define cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})
/*
 * Compare-and-exchange with RELEASE ordering: a release barrier before
 * the LR/SC loop keeps earlier accesses from sinking past the store.
 */
#define __cmpxchg_release(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public release-ordered compare-and-exchange. */
#define cmpxchg_release(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_release((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})
/*
 * Fully ordered compare-and-exchange: sc.*.rl gives release ordering on
 * the store and the trailing "fence rw, rw" provides the acquire half
 * on the success path.
 */
#define __cmpxchg(ptr, old, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"	fence rw, rw\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"	fence rw, rw\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})
/* Public fully ordered compare-and-exchange. */
#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr),				\
				       _o_, _n_, sizeof(*(ptr)));	\
})
/* CPU-local cmpxchg: no cross-CPU ordering needed, so relaxed suffices. */
#define cmpxchg_local(ptr, o, n) \
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
/* Width-checked helper: cmpxchg that insists on a 32-bit object. */
#define cmpxchg32(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	cmpxchg((ptr), (o), (n));					\
})
/* CPU-local 32-bit cmpxchg: width-checked, relaxed ordering. */
#define cmpxchg32_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	cmpxchg_relaxed((ptr), (o), (n))				\
})
/* Width-checked helper: cmpxchg that insists on a 64-bit object. */
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
/* CPU-local 64-bit cmpxchg: width-checked, relaxed ordering. */
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_relaxed((ptr), (o), (n));				\
})
375 #endif /* _ASM_RISCV_CMPXCHG_H */