/*
 * bitops.c: atomic operations which got too long to be inlined all over
 *	the place.
 *
 * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
 * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
 */

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

#ifdef CONFIG_SMP
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
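
/*
 * _atomic_spin_lock_irqsave() / _atomic_spin_unlock_irqrestore() (from
 * asm/atomic.h) hash the target address into the table above, so atomics
 * on different cachelines can take different locks.  Roughly (a sketch of
 * the hashing idea, not necessarily the exact macro):
 *
 *	ATOMIC_HASH(a) ==
 *		&__atomic_hash[((unsigned long)(a) / L1_CACHE_BYTES) &
 *			       (ATOMIC_HASH_SIZE - 1)]
 */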

#ifdef CONFIG_64BIT
/* Exchange: store x into *ptr and return the previous value, atomically. */
unsigned long __xchg64(unsigned long x, unsigned long *ptr)
{
	unsigned long temp, flags;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return temp;
}
#endif

unsigned long __xchg32(int x, int *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}

unsigned long __xchg8(char x, char *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}
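
/*
 * The generic xchg() macro is expected to dispatch on sizeof(*ptr) to one
 * of the __xchg* helpers above.  A hypothetical caller sketch (names are
 * illustrative, not from this file):
 *
 *	int val = 5;
 *	int old = xchg(&val, 7);	// old == 5, val == 7
 */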

/* Compare-and-exchange: store new into *ptr only if *ptr still equals old;
 * either way, return the value *ptr held on entry. */
u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new)
{
	unsigned long flags;
	u64 prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return prev;
}

unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old,
			    unsigned int new)
{
	unsigned long flags;
	unsigned int prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)prev;
}
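
/*
 * Likewise, the generic cmpxchg() macro should route here by operand size.
 * A hypothetical caller sketch:
 *
 *	unsigned int v = 1;
 *	unsigned int prev = cmpxchg(&v, 1, 2);	// prev == 1, v == 2
 *
 * If *ptr no longer equals old, the store is skipped; the caller can
 * inspect the returned value and retry.
 */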