/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barrier.
 * So this file is included twice from asm/cmpxchg.h.
 */
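/*
 * A minimal sketch of that double-include trick, assuming asm/cmpxchg.h
 * defines ____xchg()/____cmpxchg() along these lines (illustrative only;
 * the exact names and wrappers live in asm/cmpxchg.h):
 *
 *	#define ____xchg(type, args...)	__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)	__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>
 *
 * Each pass of this file then expands ____xchg(_u8, ...) and friends into
 * a differently named family of helpers, so one source generates both the
 * _local and the fully ordered variants.
 */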
/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */
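/*
 * Usage sketch, illustrating why the "memory" clobber matters; my_lock,
 * my_lock_acquire() and my_lock_release() are made-up names, and xchg()
 * is the generic wrapper built on top of these helpers:
 *
 *	static int my_lock;
 *
 *	static void my_lock_acquire(void)
 *	{
 *		while (xchg(&my_lock, 1))	// spin until the old value is 0
 *			cpu_relax();
 *	}
 *
 *	static void my_lock_release(void)
 *	{
 *		smp_store_release(&my_lock, 0);
 *	}
 *
 * Without the clobber (and the barriers), the compiler or the CPU could
 * move accesses to the protected data outside the critical section.
 */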
/*
 * There is no byte- or word-sized load-locked/store-conditional, so the
 * 8- and 16-bit variants operate on the containing aligned quadword:
 * insert the new value into its byte lane, load-locked, extract the old
 * value, merge, and store-conditional, retrying if the store fails.
 */
static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");
	smp_mb();

	return ret;
}
static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");
	smp_mb();

	return ret;
}
static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");
	smp_mb();

	return val;
}
static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");
	smp_mb();

	return val;
}
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);
static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
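/*
 * How the size dispatch above is typically driven (sketch only; the real
 * public macro lives in asm/cmpxchg.h, everything below is illustrative):
 *
 *	#define xchg(ptr, x)						\
 *	({								\
 *		__typeof__(*(ptr)) _x_ = (x);				\
 *		(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_,	\
 *					    sizeof(*(ptr)));		\
 *	})
 *
 * Since sizeof(*(ptr)) is a compile-time constant, the switch collapses
 * to a single case and the __xchg_called_with_bad_pointer() call is
 * optimized away for the supported sizes; only an unsupported operand
 * size leaves the call in place and fails at link time.
 */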
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 *
 * The trailing memory barrier is placed in SMP unconditionally, in
 * order to guarantee that dependency ordering is preserved when a
 * dependency is headed by an unsuccessful operation.
 */
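/*
 * Typical caller pattern for the cmpxchg() wrapper built on these
 * helpers (sketch only; my_max and my_max_update() are made-up names):
 *
 *	static long my_max;
 *
 *	static void my_max_update(long v)
 *	{
 *		long old = READ_ONCE(my_max);
 *
 *		while (old < v) {
 *			long prev = cmpxchg(&my_max, old, v);
 *
 *			if (prev == old)	// RETURN == OLD: update done
 *				break;
 *			old = prev;		// lost a race, retry with new value
 *		}
 *	}
 */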
/*
 * As with xchg above, the 8- and 16-bit cases work on the containing
 * quadword, with an extra cmpeq/beq to skip the store when the
 * comparison fails.
 */
static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
	smp_mb();

	return prev;
}
static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
	smp_mb();

	return prev;
}
static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");
	smp_mb();

	return prev;
}
static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");
	smp_mb();

	return prev;
}
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);
static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	      int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#endif