/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
	unsigned long addr, old;
	int shift;

	switch (size) {
	case 1:
		/*
		 * Emulate a 1-byte xchg with a 4-byte compare-and-swap:
		 * shift selects the target byte within the aligned word
		 * (big-endian byte order), addr is rounded down to the
		 * containing word.
		 */
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 2:
		/* Same technique for a 2-byte xchg within an aligned word. */
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
57 : "=&d" (old
), "=Q" (*(int *) ptr
)
58 : "d" (x
), "Q" (*(int *) ptr
)
67 : "=&d" (old
), "=m" (*(long *) ptr
)
68 : "d" (x
), "Q" (*(long *) ptr
)
71 #endif /* CONFIG_64BIT */
73 __xchg_called_with_bad_pointer();

#define xchg(ptr, x)						  \
({								  \
	__typeof__(*(ptr)) __ret;				  \
	__ret = (__typeof__(*(ptr)))				  \
		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
	__ret;							  \
})
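
/*
 * Usage sketch (illustrative, not part of the original interface):
 * xchg() atomically replaces a 1, 2, 4 or (on 64 bit) 8 byte object
 * and returns the previous value. The lock word and function name
 * below are hypothetical:
 *
 *	static inline int demo_test_and_set(int *lock)
 *	{
 *		return xchg(lock, 1);	(nonzero: lock was already taken)
 *	}
 */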

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long addr, prev, tmp;
	int shift;

	switch (size) {
	case 1:
		/*
		 * Emulate a 1-byte cmpxchg with a 4-byte compare-and-swap
		 * on the containing aligned word; retry only if a byte
		 * other than the one of interest changed under us.
		 */
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		/* Same technique for a 2-byte cmpxchg. */
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
149 : "=&d" (prev
), "=Q" (*(int *) ptr
)
150 : "0" (old
), "d" (new), "Q" (*(int *) ptr
)
157 : "=&d" (prev
), "=Q" (*(long *) ptr
)
158 : "0" (old
), "d" (new), "Q" (*(long *) ptr
)
161 #endif /* CONFIG_64BIT */
163 __cmpxchg_called_with_bad_pointer();

#define cmpxchg(ptr, o, n)					 \
({								 \
	__typeof__(*(ptr)) __ret;				 \
	__ret = (__typeof__(*(ptr)))				 \
		__cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
			  sizeof(*(ptr)));			 \
	__ret;							 \
})
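
/*
 * Usage sketch (illustrative): the canonical retry loop built on top of
 * cmpxchg(). The store only takes effect if nobody raced with us; on a
 * race the loop re-reads and tries again. Names are made up for the
 * example:
 *
 *	static inline void demo_atomic_add(int *counter, int val)
 *	{
 *		int old;
 *
 *		do {
 *			old = *counter;
 *		} while (cmpxchg(counter, old, old + val) != old);
 *	}
 */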

#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)					\
({								\
	cmpxchg((ptr), (o), (n));				\
})
#else /* CONFIG_64BIT */
static inline unsigned long long __cmpxchg64(void *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};

	/* cds operates on the doubleword at *ptr, not the pointer itself. */
	asm volatile(
		"	cds	%0,%2,%1"
		: "+&d" (rp_old), "+Q" (*(unsigned long long *) ptr)
		: "d" (rp_new)
		: "memory", "cc");
	return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)				\
({							\
	__typeof__(*(ptr)) __ret;			\
	__ret = (__typeof__(*(ptr)))			\
		__cmpxchg64((ptr),			\
			    (unsigned long long)(o),	\
			    (unsigned long long)(n));	\
	__ret;						\
})
#endif /* CONFIG_64BIT */
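
/*
 * Usage sketch (illustrative): cmpxchg64() offers the cmpxchg()
 * interface for 64 bit quantities on both 31 and 64 bit kernels; on
 * 31 bit it maps to the "cds" based __cmpxchg64() above. The variable
 * names are hypothetical:
 *
 *	u64 prev = cmpxchg64(&seq, old_seq, old_seq + 1);
 */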

#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)	\
({								\
	register __typeof__(*(p1)) __old1 asm("2") = (o1);	\
	register __typeof__(*(p2)) __old2 asm("3") = (o2);	\
	register __typeof__(*(p1)) __new1 asm("4") = (n1);	\
	register __typeof__(*(p2)) __new2 asm("5") = (n2);	\
	int cc;							\
	asm volatile(						\
			insn " %[old],%[new],%[ptr]\n"		\
		"	ipm	%[cc]\n"			\
		"	srl	%[cc],28"			\
		: [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2) \
		: [new] "d" (__new1), "d" (__new2),		\
		  [ptr] "Q" (*(p1)), "Q" (*(p2))		\
		: "memory", "cc");				\
	!cc;							\
})

#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

extern void __cmpxchg_double_called_with_bad_pointer(void);

#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)		\
({								\
	int __ret;						\
	switch (sizeof(*(p1))) {				\
	case 4:							\
		__ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2); \
		break;						\
	case 8:							\
		__ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2); \
		break;						\
	default:						\
		__cmpxchg_double_called_with_bad_pointer();	\
	}							\
	__ret;							\
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({								\
	__typeof__(p1) __p1 = (p1);				\
	__typeof__(p2) __p2 = (p2);				\
	int __ret;						\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));		\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));		\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	if (sizeof(long) == 4)					\
		__ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2); \
	else							\
		__ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2); \
	__ret;							\
})

#define system_has_cmpxchg_double()	1
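
/*
 * Usage sketch (illustrative): cmpxchg_double() updates two adjacent
 * long-sized words as a single atomic unit and returns nonzero on
 * success; the words must form a naturally aligned, contiguous pair
 * (the VM_BUG_ON above enforces adjacency). The structure and names
 * here are hypothetical:
 *
 *	struct demo_pair {
 *		unsigned long first;
 *		unsigned long second;
 *	} __attribute__((aligned(2 * sizeof(unsigned long))));
 *
 *	static inline int demo_pair_update(struct demo_pair *p,
 *					   unsigned long o1, unsigned long o2,
 *					   unsigned long n1, unsigned long n2)
 *	{
 *		return cmpxchg_double(&p->first, &p->second, o1, o2, n1, n2);
 *	}
 */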

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 1:
	case 2:
	case 4:
#ifdef CONFIG_64BIT
	case 8:
#endif
		/* Sizes with a native implementation reuse the SMP-safe
		 * __cmpxchg().
		 */
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg_local((ptr), (unsigned long)(o),		\
				(unsigned long)(n), sizeof(*(ptr)));	\
	__ret;								\
})

#define cmpxchg64_local(ptr, o, n)	cmpxchg64((ptr), (o), (n))
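
/*
 * Usage sketch (illustrative): cmpxchg_local() takes the same arguments
 * as cmpxchg() but is only guaranteed to be atomic against code running
 * on the same CPU, which is sufficient for per-cpu state. The variable
 * names are hypothetical:
 *
 *	old = cmpxchg_local(&pcp_counter, expected, expected + 1);
 */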

#endif /* __ASM_CMPXCHG_H */