/* SPDX-License-Identifier: GPL-2.0 */
/* 32-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com.au)
 * Copyright (C) 2007 Kyle McMartin (kyle@parisc-linux.org)
 *
 * Additions by Keith M Wesolowski (wesolows@foobazco.org) based
 * on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf <prumpf@tux.org>.
 */

#ifndef __ARCH_SPARC_CMPXCHG__
#define __ARCH_SPARC_CMPXCHG__
15 unsigned long __xchg_u32(volatile u32
*m
, u32
new);
16 void __xchg_called_with_bad_pointer(void);
/*
 * Size-dispatching xchg() core.  Only 4-byte objects are supported on
 * 32-bit sparc; any other size references the undefined bug catcher and
 * fails at link time.
 *
 * @x:    new value to store
 * @ptr:  object to exchange with
 * @size: sizeof(*ptr), chosen by the xchg() wrapper macro
 *
 * Returns the previous value of *ptr.
 */
static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int size)
{
	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
	}
	/* Unsupported size: force a link error via the bug catcher. */
	__xchg_called_with_bad_pointer();
	return x;
}
/* Atomically exchange *(ptr) with x; evaluates to the old value of *(ptr). */
#define xchg(ptr,x) ({(__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));})
/* Emulate cmpxchg() the same way we emulate atomics,
 * by hashing the object address and indexing into an array
 * of spinlocks to get a bit of performance...
 *
 * See arch/sparc/lib/atomic32.c for implementation.
 *
 * Cribbed from <asm-parisc/atomic.h>
 */
39 /* bug catcher for when unsupported size is used - won't link */
40 void __cmpxchg_called_with_bad_pointer(void);
41 /* we only need to support cmpxchg of a u32 on sparc */
42 unsigned long __cmpxchg_u32(volatile u32
*m
, u32 old
, u32 new_
);
44 /* don't worry...optimizer will get rid of most of this */
45 static inline unsigned long
46 __cmpxchg(volatile void *ptr
, unsigned long old
, unsigned long new_
, int size
)
50 return __cmpxchg_u32((u32
*)ptr
, (u32
)old
, (u32
)new_
);
52 __cmpxchg_called_with_bad_pointer();
/*
 * cmpxchg(ptr, o, n): if *ptr == o, store n; evaluates to the value that
 * was in *ptr.  The ({ }) statement expression makes the macro usable as
 * a value while evaluating o and n exactly once.
 */
#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
			(unsigned long)_n_, sizeof(*(ptr)));		\
})
66 u64
__cmpxchg_u64(u64
*ptr
, u64 old
, u64
new);
67 #define cmpxchg64(ptr, old, new) __cmpxchg_u64(ptr, old, new)
#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
/* Non-SMP-safe variants: delegate to the generic local implementations. */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr),		\
						     (unsigned long)(o),\
						     (unsigned long)(n),\
						     sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif /* __ARCH_SPARC_CMPXCHG__ */