/*
 * Based on arch/arm/include/asm/cmpxchg.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>

#include <asm/barrier.h>
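
/*
 * Atomic exchange and compare-and-swap helpers built on the ARMv8
 * load-exclusive/store-exclusive (and load-acquire/store-release)
 * instructions: the store-exclusive fails if another observer touched
 * the location since the load-exclusive, in which case the loop retries.
 */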

static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, tmp;

	switch (size) {
	case 1:
		asm volatile("//	__xchg1\n"
		"1:	ldaxrb	%w0, %2\n"
		"	stlxrb	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u8 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 2:
		asm volatile("//	__xchg2\n"
		"1:	ldaxrh	%w0, %2\n"
		"	stlxrh	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u16 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 4:
		asm volatile("//	__xchg4\n"
		"1:	ldaxr	%w0, %2\n"
		"	stlxr	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u32 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 8:
		asm volatile("//	__xchg8\n"
		"1:	ldaxr	%0, %2\n"
		"	stlxr	%w1, %3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u64 *)ptr)
			: "r" (x)
			: "memory");
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}

#define xchg(ptr,x) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))); \
	__ret; \
})
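
/*
 * Usage sketch (illustrative only, not part of the original header):
 * xchg() atomically stores a new value and returns the previous contents,
 * e.g. handing off a hypothetical flag word:
 *
 *	static unsigned long flag;
 *	unsigned long prev = xchg(&flag, 1UL);
 *	if (prev == 0)
 *		;	// we were the first to set the flag
 */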

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long oldval = 0, res;

	switch (size) {
	case 1:
		do {
			asm volatile("// __cmpxchg1\n"
			"	ldxrb	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrb	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u8 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 2:
		do {
			asm volatile("// __cmpxchg2\n"
			"	ldxrh	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrh	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u16 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 4:
		do {
			asm volatile("// __cmpxchg4\n"
			"	ldxr	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u32 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 8:
		do {
			asm volatile("// __cmpxchg8\n"
			"	ldxr	%1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%1, %3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u64 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	default:
		BUILD_BUG();
	}

	return oldval;
}

static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
	unsigned long ret;

	smp_mb();
	ret = __cmpxchg(ptr, old, new, size);
	smp_mb();

	return ret;
}

#define cmpxchg(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg_mb((ptr), (unsigned long)(o), (unsigned long)(n), \
			     sizeof(*(ptr))); \
	__ret; \
})
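
/*
 * Usage sketch (illustrative only): cmpxchg() returns the value found at
 * *ptr; the new value was stored iff that equals the expected value, e.g.
 *
 *	static int owner;
 *	if (cmpxchg(&owner, 0, 1) == 0)
 *		;	// the 0 -> 1 transition succeeded, we own the resource
 *
 * 'owner' is a hypothetical variable used purely for illustration.
 */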

#define cmpxchg_local(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg((ptr), (unsigned long)(o), \
			  (unsigned long)(n), sizeof(*(ptr))); \
	__ret; \
})
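
/*
 * The 64-bit helpers map directly onto the native routines above:
 * unsigned long is 64-bit on arm64, so cmpxchg()/cmpxchg_local() already
 * handle 64-bit quantities. cmpxchg64_relaxed() carries no ordering
 * guarantees and therefore reuses the barrier-free cmpxchg_local().
 */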
#define cmpxchg64(ptr,o,n)		cmpxchg((ptr),(o),(n))
#define cmpxchg64_local(ptr,o,n)	cmpxchg_local((ptr),(o),(n))

#define cmpxchg64_relaxed(ptr,o,n)	cmpxchg_local((ptr),(o),(n))

#endif	/* __ASM_CMPXCHG_H */