/*
 * Copyright 2004-2011 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_CMPXCHG__
#define __ARCH_BLACKFIN_CMPXCHG__

#ifdef CONFIG_SMP

#include <linux/linkage.h>
asmlinkage unsigned long __raw_xchg_1_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_2_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_4_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_cmpxchg_1_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_2_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_4_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
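
/*
 * __xchg() dispatches on the operand size to the matching assembly
 * helper declared above; only 1-, 2- and 4-byte objects are supported.
 */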
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp;

	switch (size) {
	case 1:
		tmp = __raw_xchg_1_asm(ptr, x);
		break;
	case 2:
		tmp = __raw_xchg_2_asm(ptr, x);
		break;
	case 4:
		tmp = __raw_xchg_4_asm(ptr, x);
		break;
	}

	return tmp;
}
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long tmp;

	switch (size) {
	case 1:
		tmp = __raw_cmpxchg_1_asm(ptr, new, old);
		break;
	case 2:
		tmp = __raw_cmpxchg_2_asm(ptr, new, old);
		break;
	case 4:
		tmp = __raw_cmpxchg_4_asm(ptr, new, old);
		break;
	}

	return tmp;
}
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
		(unsigned long)(n), sizeof(*(ptr))))
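
/*
 * Illustrative sketch (not part of the original header): the usual
 * read/modify/retry loop a caller might build on cmpxchg().  The helper
 * name example_atomic_add and its parameters are hypothetical and exist
 * only to show the pattern.
 */
#if 0
static inline unsigned long example_atomic_add(volatile unsigned long *p,
					       unsigned long inc)
{
	unsigned long old, new;

	do {
		old = *p;		/* snapshot the current value */
		new = old + inc;	/* compute the desired value */
		/* retry if another CPU updated *p after the snapshot */
	} while (cmpxchg(p, old, new) != old);

	return new;
}
#endif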
#else /* !CONFIG_SMP */

#include <mach/blackfin.h>
#include <asm/irqflags.h>

struct __xchg_dummy {
	unsigned long a[100];
};
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp = 0;
	unsigned long flags;

	flags = hard_local_irq_save();

	switch (size) {
	case 1:
		__asm__ __volatile__
			("%0 = b%2 (z);\n\t"
			 "b%2 = %1;\n\t"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("%0 = w%2 (z);\n\t"
			 "w%2 = %1;\n\t"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("%0 = %2;\n\t"
			 "%2 = %1;\n\t"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	hard_local_irq_restore(flags);
	return tmp;
}
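
/*
 * Without SMP, masking interrupts (as in __xchg() above) already makes
 * these operations atomic on the single CPU, so the generic cmpxchg-local
 * helpers below can stand in for a full cmpxchg.
 */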
#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#define cmpxchg(ptr, o, n) cmpxchg_local((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
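
/*
 * Illustrative sketch (not from the original header): cmpxchg_local() is
 * only guaranteed atomic against the current CPU, which is enough for data
 * never touched by another CPU.  update_local_max and its parameters are
 * hypothetical names.
 */
#if 0
static inline void update_local_max(unsigned long *max, unsigned long val)
{
	unsigned long old;

	do {
		old = *max;
		if (val <= old)		/* nothing new to record */
			return;
		/* retry only if a local interrupt handler raced with us */
	} while (cmpxchg_local(max, old, val) != old);
}
#endif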
#endif /* !CONFIG_SMP */

#define xchg(ptr, x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
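
/*
 * Illustrative sketch (not from the original header): xchg() returns the
 * previous value, so it can hand off a single pending item exactly once.
 * claim_pending and its parameter are hypothetical names.
 */
#if 0
static inline void *claim_pending(void **pending)
{
	/* Atomically swap in NULL; only one caller observes the old pointer. */
	return xchg(pending, NULL);
}
#endif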

#endif /* __ARCH_BLACKFIN_CMPXCHG__ */