1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ALPHA_CMPXCHG_H
3 #define _ALPHA_CMPXCHG_H
/*
 * Atomic exchange routines.
 */
/*
 * Dispatch helpers for the *_local (non-SMP-ordered) variants: paste the
 * width suffix in `type` onto the base name, so callers reach the
 * size-specific __xchg<type>_local()/__cmpxchg<type>_local() helper.
 * NOTE(review): upstream these definitions are bracketed by an #include of
 * the size-dispatch header and matching #undef lines — not visible here.
 */
9 #define ____xchg(type, args...) __xchg ## type ## _local(args)
10 #define ____cmpxchg(type, args...) __cmpxchg ## type ## _local(args)
/*
 * xchg_local(ptr, x): exchange *ptr with x without SMP ordering barriers.
 * `x` is evaluated exactly once into `_x_` (avoids the classic
 * multiple-evaluation macro hazard), and the `unsigned long` result of the
 * helper is cast back to the pointee's type.
 * NOTE(review): the `({` statement-expression opener, the trailing
 * `sizeof(*(ptr))` argument, and the closing `})` are missing from this
 * chunk — confirm against the upstream header before editing.
 */
13 #define xchg_local(ptr, x) \
15 __typeof__(*(ptr)) _x_ = (x); \
16 (__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_, \
/*
 * cmpxchg_local(ptr, o, n): compare-and-exchange *ptr (expected `o`,
 * new value `n`) without SMP ordering barriers.  `o` and `n` are each
 * evaluated exactly once; the helper's `unsigned long` result is cast
 * back to the pointee's type.
 * NOTE(review): the `({` opener, the trailing `sizeof(*(ptr))` argument,
 * and the closing `})` are missing from this chunk.
 */
20 #define cmpxchg_local(ptr, o, n) \
22 __typeof__(*(ptr)) _o_ = (o); \
23 __typeof__(*(ptr)) _n_ = (n); \
24 (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
/*
 * cmpxchg64_local(ptr, o, n): 64-bit-only wrapper around cmpxchg_local().
 * The BUILD_BUG_ON rejects, at compile time, any pointer whose pointee is
 * not exactly 8 bytes.
 * NOTE(review): the enclosing `({` / `})` statement-expression lines are
 * missing from this chunk.
 */
29 #define cmpxchg64_local(ptr, o, n) \
31 BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
32 cmpxchg_local((ptr), (o), (n)); \
/*
 * Dispatch helpers for the fully-ordered variants: paste the width suffix
 * in `type` onto the base name to reach __xchg<type>()/__cmpxchg<type>().
 * NOTE(review): upstream, the earlier `_local` definitions of these same
 * macros are #undef'd before this point — that scaffolding is not visible
 * in this chunk.
 */
37 #define ____xchg(type, args...) __xchg ##type(args)
38 #define ____cmpxchg(type, args...) __cmpxchg ##type(args)
/*
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */
/*
 * xchg(ptr, x): fully-ordered atomic exchange of *ptr with x.
 * `x` is evaluated exactly once into `_x_`; the helper returns
 * `unsigned long`, which is cast back to the pointee's type via `__ret`.
 * NOTE(review): the `({` opener, the memory-barrier statements that make
 * this fully ordered, and the closing `__ret; })` are missing from this
 * chunk — confirm against the upstream header before editing.
 */
45 #define xchg(ptr, x) \
47 __typeof__(*(ptr)) __ret; \
48 __typeof__(*(ptr)) _x_ = (x); \
50 __ret = (__typeof__(*(ptr))) \
51 __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
/*
 * cmpxchg(ptr, o, n): fully-ordered atomic compare-and-exchange of *ptr
 * (expected `o`, new value `n`).  `o` and `n` are each evaluated exactly
 * once; the helper's `unsigned long` result is cast back to the pointee's
 * type via `__ret`.
 * NOTE(review): the `({` opener, the memory-barrier statements, and the
 * closing `__ret; })` are missing from this chunk.
 */
56 #define cmpxchg(ptr, o, n) \
58 __typeof__(*(ptr)) __ret; \
59 __typeof__(*(ptr)) _o_ = (o); \
60 __typeof__(*(ptr)) _n_ = (n); \
62 __ret = (__typeof__(*(ptr))) __cmpxchg((ptr), \
63 (unsigned long)_o_, (unsigned long)_n_, sizeof(*(ptr)));\
/*
 * cmpxchg64(ptr, o, n): 64-bit-only wrapper around the fully-ordered
 * cmpxchg().  The BUILD_BUG_ON rejects, at compile time, any pointer
 * whose pointee is not exactly 8 bytes.
 * NOTE(review): the enclosing `({` / `})` statement-expression lines are
 * missing from this chunk.
 */
68 #define cmpxchg64(ptr, o, n) \
70 BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
71 cmpxchg((ptr), (o), (n)); \
76 #endif /* _ALPHA_CMPXCHG_H */