#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

/* On x86-64 an aligned 64-bit store is already atomic, so a plain
 * assignment suffices. */
static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}
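
/*
 * A minimal usage sketch (the variable and function names below are
 * illustrative assumptions, not part of this header):
 *
 *	static volatile u64 last_update_ns;
 *
 *	static void publish_timestamp(u64 now_ns)
 *	{
 *		set_64bit(&last_update_ns, now_ns);	// one atomic store
 *	}
 */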

#define __HAVE_ARCH_CMPXCHG 1

/* cmpxchg() already handles 64-bit quantities on x86-64. */
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
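
/*
 * A minimal sketch of the usual retry loop around cmpxchg64(); the
 * counter and helper names are illustrative assumptions:
 *
 *	static u64 event_count;
 *
 *	static void add_events(u64 n)
 *	{
 *		u64 old, new;
 *
 *		do {
 *			old = event_count;
 *			new = old + n;
 *		} while (cmpxchg64(&event_count, old, new) != old);
 *	}
 */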

/*
 * Expected value in rdx:rax, replacement in rcx:rbx; setz captures ZF,
 * so the macro evaluates to 1 on success and 0 on failure.
 */
#define cmpxchg16b(ptr, o1, o2, n1, n2)					\
({									\
	char __ret;							\
	__typeof__(o2) __junk;						\
	__typeof__(*(ptr)) __old1 = (o1);				\
	__typeof__(o2) __old2 = (o2);					\
	__typeof__(*(ptr)) __new1 = (n1);				\
	__typeof__(o2) __new2 = (n2);					\
	asm volatile(LOCK_PREFIX "cmpxchg16b %2;setz %1"		\
		     : "=d"(__junk), "=a"(__ret), "+m" (*ptr)		\
		     : "b"(__new1), "c"(__new2),			\
		       "a"(__old1), "d"(__old2));			\
	__ret;								\
})
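
/*
 * A minimal sketch of driving cmpxchg16b() directly (struct and
 * function names are illustrative assumptions): a pointer paired with
 * a sequence counter, kept in one 16-byte-aligned block to avoid ABA.
 *
 *	struct tagged_ptr {
 *		u64 ptr;
 *		u64 seq;
 *	} __attribute__((aligned(16)));
 *
 *	static bool replace(struct tagged_ptr *t, u64 old_ptr,
 *			    u64 old_seq, u64 new_ptr)
 *	{
 *		return cmpxchg16b(&t->ptr, old_ptr, old_seq,
 *				  new_ptr, old_seq + 1);
 *	}
 */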

/*
 * Like cmpxchg16b() but without the LOCK prefix: only for data that
 * other CPUs cannot touch concurrently (e.g. per-cpu structures).
 */
#define cmpxchg16b_local(ptr, o1, o2, n1, n2)				\
({									\
	char __ret;							\
	__typeof__(o2) __junk;						\
	__typeof__(*(ptr)) __old1 = (o1);				\
	__typeof__(o2) __old2 = (o2);					\
	__typeof__(*(ptr)) __new1 = (n1);				\
	__typeof__(o2) __new2 = (n2);					\
	asm volatile("cmpxchg16b %2;setz %1"				\
		     : "=d"(__junk), "=a"(__ret), "+m" (*ptr)		\
		     : "b"(__new1), "c"(__new2),			\
		       "a"(__old1), "d"(__old2));			\
	__ret;								\
})

/* cmpxchg16b faults on unaligned operands, hence the alignment check. */
#define cmpxchg_double(ptr, o1, o2, n1, n2)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b((ptr), (o1), (o2), (n1), (n2));			\
})

#define cmpxchg_double_local(ptr, o1, o2, n1, n2)			\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2));		\
})

/* CMPXCHG16B is optional; cpu_has_cx16 reflects the CPUID CX16 flag. */
#define system_has_cmpxchg_double()	cpu_has_cx16
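
/*
 * A minimal sketch of the intended caller pattern (the names below are
 * illustrative assumptions): check system_has_cmpxchg_double() at
 * runtime before relying on cmpxchg_double().
 *
 *	struct freelist {
 *		u64 first;	// pointer to the first free object
 *		u64 counter;	// bumped on every update (ABA guard)
 *	} __attribute__((aligned(16)));
 *
 *	static bool freelist_swap(struct freelist *fl, u64 old_first,
 *				  u64 old_counter, u64 new_first)
 *	{
 *		if (!system_has_cmpxchg_double())
 *			return false;	// caller falls back to a lock
 *		return cmpxchg_double(&fl->first, old_first, old_counter,
 *				      new_first, old_counter + 1);
 *	}
 */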

#endif /* _ASM_X86_CMPXCHG_64_H */