/* 32-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com.au)
 * Copyright (C) 2007 Kyle McMartin (kyle@parisc-linux.org)
 *
 * Additions by Keith M Wesolowski (wesolows@foobazco.org) based
 * on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf <prumpf@tux.org>.
 */
11 #ifndef __ARCH_SPARC_CMPXCHG__
12 #define __ARCH_SPARC_CMPXCHG__
14 static inline unsigned long xchg_u32(__volatile__
unsigned long *m
, unsigned long val
)
16 __asm__
__volatile__("swap [%2], %0"
/* Bug catcher for when an unsupported size is used - won't link. */
extern void __xchg_called_with_bad_pointer(void);

/* Size-dispatched exchange helper behind the xchg() macro.
 * Only 4-byte operands are supported on sparc32; any other size calls the
 * undefined bug catcher above, turning misuse into a link-time error.
 */
static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int size)
{
	switch (size) {
	case 4:
		return xchg_u32(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

/* Type-preserving wrapper: result is cast back to the pointee's type. */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
/* Emulate cmpxchg() the same way we emulate atomics,
 * by hashing the object address and indexing into an array
 * of spinlocks to get a bit of performance...
 *
 * See arch/sparc/lib/atomic32.c for implementation.
 *
 * Cribbed from <asm-parisc/atomic.h>
 */
#define __HAVE_ARCH_CMPXCHG	1
47 /* bug catcher for when unsupported size is used - won't link */
48 extern void __cmpxchg_called_with_bad_pointer(void);
49 /* we only need to support cmpxchg of a u32 on sparc */
50 extern unsigned long __cmpxchg_u32(volatile u32
*m
, u32 old
, u32 new_
);
52 /* don't worry...optimizer will get rid of most of this */
53 static inline unsigned long
54 __cmpxchg(volatile void *ptr
, unsigned long old
, unsigned long new_
, int size
)
58 return __cmpxchg_u32((u32
*)ptr
, (u32
)old
, (u32
)new_
);
60 __cmpxchg_called_with_bad_pointer();
/* Type-preserving cmpxchg(): evaluates o and n once each into locals of
 * the pointee's type, then dispatches to __cmpxchg(). Uses a GCC statement
 * expression so the whole macro yields the old value, cast back to the
 * pointee's type.
 */
#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
			(unsigned long)_n_, sizeof(*(ptr)));		\
})
74 #include <asm-generic/cmpxchg-local.h>
77 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
80 #define cmpxchg_local(ptr, o, n) \
81 ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
82 (unsigned long)(n), sizeof(*(ptr))))
83 #define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
85 #endif /* __ARCH_SPARC_CMPXCHG__ */