/*
 * Linux 3.17-rc2
 * arch/blackfin/include/asm/cmpxchg.h
 * blob c05868cc61c1aacc290ecfacfebb048cf032bc6f
 */
/*
 * Copyright 2004-2011 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */
7 #ifndef __ARCH_BLACKFIN_CMPXCHG__
8 #define __ARCH_BLACKFIN_CMPXCHG__
10 #ifdef CONFIG_SMP
12 #include <linux/linkage.h>
/*
 * SMP-safe exchange / compare-and-exchange primitives implemented in
 * assembly elsewhere in the arch code.  The _1/_2/_4 suffix is the
 * operand width in bytes; each returns the value previously held at
 * *ptr as an (zero-extended) unsigned long.
 */
asmlinkage unsigned long __raw_xchg_1_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_2_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_4_asm(volatile void *ptr, unsigned long value);
/* NOTE: argument order is (ptr, new, old) — the reverse of __cmpxchg() below */
asmlinkage unsigned long __raw_cmpxchg_1_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_2_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_4_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
/*
 * __xchg() - atomically exchange *ptr with x (SMP build).
 * @x:    new value to store
 * @ptr:  location to update
 * @size: operand width in bytes (1, 2 or 4)
 *
 * Dispatches to the width-specific assembly helper and returns the value
 * previously held at *ptr.  tmp is initialized so that an unsupported
 * size yields 0 instead of reading an uninitialized variable (which is
 * undefined behavior); callers only reach here via the xchg() macro,
 * whose sizeof(*(ptr)) should always be 1, 2 or 4.
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp = 0;

	switch (size) {
	case 1:
		tmp = __raw_xchg_1_asm(ptr, x);
		break;
	case 2:
		tmp = __raw_xchg_2_asm(ptr, x);
		break;
	case 4:
		tmp = __raw_xchg_4_asm(ptr, x);
		break;
	}
	return tmp;
}
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
/*
 * __cmpxchg() - atomic compare-and-exchange (SMP build).
 * @ptr:  location to update
 * @old:  expected current value
 * @new:  value to store if *ptr == old
 * @size: operand width in bytes (1, 2 or 4)
 *
 * Returns the value found at *ptr; the swap happened iff the return
 * value equals @old.  Note the asm helpers take (ptr, new, old).
 * tmp is initialized so an unsupported size returns 0 instead of an
 * uninitialized value (undefined behavior); 0 != old signals failure
 * unless old itself is 0.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long tmp = 0;

	switch (size) {
	case 1:
		tmp = __raw_cmpxchg_1_asm(ptr, new, old);
		break;
	case 2:
		tmp = __raw_cmpxchg_2_asm(ptr, new, old);
		break;
	case 4:
		tmp = __raw_cmpxchg_4_asm(ptr, new, old);
		break;
	}
	return tmp;
}
/*
 * cmpxchg() - type-preserving wrapper around __cmpxchg(); the operand
 * width is taken from sizeof(*(ptr)) and the result is cast back to
 * the pointed-to type.
 */
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
		(unsigned long)(n), sizeof(*(ptr))))
72 #else /* !CONFIG_SMP */
74 #include <mach/blackfin.h>
75 #include <asm/irqflags.h>
/*
 * __xg() casts an arbitrary pointer to a pointer-to-large-struct so the
 * "m" constraints in the asm below tell the compiler a wide region of
 * memory may be accessed, preventing it from caching *ptr across the
 * exchange.  The struct is never instantiated.
 */
struct __xchg_dummy {
	unsigned long a[100];
};
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
/*
 * __xchg() - exchange *ptr with x (UP build).
 * @x:    new value to store
 * @ptr:  location to update
 * @size: operand width in bytes (1, 2 or 4)
 *
 * With only one CPU, atomicity is achieved by disabling interrupts
 * around a plain load + store pair.  Returns the value previously held
 * at *ptr; an unsupported size leaves tmp at its initial 0 (no default
 * case in the switch).
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp = 0;
	unsigned long flags;

	flags = hard_local_irq_save();

	switch (size) {
	case 1:
		/* byte-wide load (zero-extended per the (z) suffix), then store */
		__asm__ __volatile__
			("%0 = b%2 (z);\n\t"
			"b%2 = %1;\n\t"
			: "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		/* 16-bit load/store variant */
		__asm__ __volatile__
			("%0 = w%2 (z);\n\t"
			"w%2 = %1;\n\t"
			: "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		/* full-word load/store */
		__asm__ __volatile__
			("%0 = %2;\n\t"
			"%2 = %1;\n\t"
			: "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}

	hard_local_irq_restore(flags);
	return tmp;
}
114 #include <asm-generic/cmpxchg-local.h>
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU.  Always make
 * them available.
 */
/* Per-CPU-atomic compare-and-exchange via the generic helper. */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
		(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

/* On UP the local variants are sufficient for the full cmpxchg API. */
#define cmpxchg(ptr, o, n) cmpxchg_local((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
128 #endif /* !CONFIG_SMP */
/*
 * xchg() - type-preserving exchange; picks the operand width from
 * sizeof(*(ptr)) and dispatches to the SMP or UP __xchg() above.
 */
#define xchg(ptr, x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
/* tas() - test-and-set: store 1, discard the old value. */
#define tas(ptr) ((void)xchg((ptr), 1))
133 #endif /* __ARCH_BLACKFIN_CMPXCHG__ */