1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ALPHA_CMPXCHG_H
3 #error Do not include xchg.h directly!
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that local version do not have the expensive memory barrier.
 * So this file is included twice from asm/cmpxchg.h.
 */

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */
17 static inline unsigned long
18 ____xchg(_u8
, volatile char *m
, unsigned long val
)
20 unsigned long ret
, tmp
, addr64
;
34 : "=&r" (ret
), "=&r" (val
), "=&r" (tmp
), "=&r" (addr64
)
35 : "r" ((long)m
), "1" (val
) : "memory");
40 static inline unsigned long
41 ____xchg(_u16
, volatile short *m
, unsigned long val
)
43 unsigned long ret
, tmp
, addr64
;
57 : "=&r" (ret
), "=&r" (val
), "=&r" (tmp
), "=&r" (addr64
)
58 : "r" ((long)m
), "1" (val
) : "memory");
63 static inline unsigned long
64 ____xchg(_u32
, volatile int *m
, unsigned long val
)
76 : "=&r" (val
), "=&r" (dummy
), "=m" (*m
)
77 : "rI" (val
), "m" (*m
) : "memory");
82 static inline unsigned long
83 ____xchg(_u64
, volatile long *m
, unsigned long val
)
95 : "=&r" (val
), "=&r" (dummy
), "=m" (*m
)
96 : "rI" (val
), "m" (*m
) : "memory");
101 /* This function doesn't exist, so you'll get a linker error
102 if something tries to do an invalid xchg(). */
103 extern void __xchg_called_with_bad_pointer(void);
105 static __always_inline
unsigned long
106 ____xchg(, volatile void *ptr
, unsigned long x
, int size
)
110 return ____xchg(_u8
, ptr
, x
);
112 return ____xchg(_u16
, ptr
, x
);
114 return ____xchg(_u32
, ptr
, x
);
116 return ____xchg(_u64
, ptr
, x
);
118 __xchg_called_with_bad_pointer();
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
128 static inline unsigned long
129 ____cmpxchg(_u8
, volatile char *m
, unsigned char old
, unsigned char new)
131 unsigned long prev
, tmp
, cmp
, addr64
;
133 __asm__
__volatile__(
136 "1: ldq_l %2,0(%4)\n"
148 : "=&r" (prev
), "=&r" (new), "=&r" (tmp
), "=&r" (cmp
), "=&r" (addr64
)
149 : "r" ((long)m
), "Ir" (old
), "1" (new) : "memory");
154 static inline unsigned long
155 ____cmpxchg(_u16
, volatile short *m
, unsigned short old
, unsigned short new)
157 unsigned long prev
, tmp
, cmp
, addr64
;
159 __asm__
__volatile__(
162 "1: ldq_l %2,0(%4)\n"
174 : "=&r" (prev
), "=&r" (new), "=&r" (tmp
), "=&r" (cmp
), "=&r" (addr64
)
175 : "r" ((long)m
), "Ir" (old
), "1" (new) : "memory");
180 static inline unsigned long
181 ____cmpxchg(_u32
, volatile int *m
, int old
, int new)
183 unsigned long prev
, cmp
;
185 __asm__
__volatile__(
196 : "=&r"(prev
), "=&r"(cmp
), "=m"(*m
)
197 : "r"((long) old
), "r"(new), "m"(*m
) : "memory");
202 static inline unsigned long
203 ____cmpxchg(_u64
, volatile long *m
, unsigned long old
, unsigned long new)
205 unsigned long prev
, cmp
;
207 __asm__
__volatile__(
218 : "=&r"(prev
), "=&r"(cmp
), "=m"(*m
)
219 : "r"((long) old
), "r"(new), "m"(*m
) : "memory");
224 /* This function doesn't exist, so you'll get a linker error
225 if something tries to do an invalid cmpxchg(). */
226 extern void __cmpxchg_called_with_bad_pointer(void);
228 static __always_inline
unsigned long
229 ____cmpxchg(, volatile void *ptr
, unsigned long old
, unsigned long new,
234 return ____cmpxchg(_u8
, ptr
, old
, new);
236 return ____cmpxchg(_u16
, ptr
, old
, new);
238 return ____cmpxchg(_u32
, ptr
, old
, new);
240 return ____cmpxchg(_u64
, ptr
, old
, new);
242 __cmpxchg_called_with_bad_pointer();