/*
 * Based on arch/arm/include/asm/cmpxchg.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>

#include <asm/barrier.h>
/*
 * Unconditional atomic swap: store x into *ptr and return the previous
 * value. The load-acquire/store-release exclusive pair is retried until
 * the store succeeds.
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long ret, tmp;

	switch (size) {
	case 1:
		asm volatile("//	__xchg1\n"
		"1:	ldaxrb	%w0, %2\n"
		"	stlxrb	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u8 *)ptr)
			: "r" (x)
			: "cc", "memory");
		break;
	case 2:
		asm volatile("//	__xchg2\n"
		"1:	ldaxrh	%w0, %2\n"
		"	stlxrh	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u16 *)ptr)
			: "r" (x)
			: "cc", "memory");
		break;
	case 4:
		asm volatile("//	__xchg4\n"
		"1:	ldaxr	%w0, %2\n"
		"	stlxr	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u32 *)ptr)
			: "r" (x)
			: "cc", "memory");
		break;
	case 8:
		asm volatile("//	__xchg8\n"
		"1:	ldaxr	%0, %2\n"
		"	stlxr	%w1, %3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u64 *)ptr)
			: "r" (x)
			: "cc", "memory");
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}
#define xchg(ptr,x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
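/*
 * Usage sketch (illustrative only, not part of the original header; the
 * variable names are hypothetical):
 *
 *	u32 old_state = xchg(&dev_state, NEW_STATE);
 *
 * xchg() stores the new value and returns the value previously held at the
 * location, with acquire/release ordering provided by the ldaxr/stlxr pair
 * above.
 */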
/*
 * Compare-and-swap without barriers: *ptr is updated to 'new' only if it
 * currently holds 'old'; the value observed at *ptr is returned either way.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long oldval = 0, res;

	switch (size) {
	case 1:
		do {
			asm volatile("// __cmpxchg1\n"
			"	ldxrb	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrb	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u8 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 2:
		do {
			asm volatile("// __cmpxchg2\n"
			"	ldxrh	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrh	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u16 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 4:
		do {
			asm volatile("// __cmpxchg4\n"
			"	ldxr	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u32 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 8:
		do {
			asm volatile("// __cmpxchg8\n"
			"	ldxr	%1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%1, %3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u64 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	default:
		BUILD_BUG();
	}

	return oldval;
}
static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
	unsigned long ret;

	smp_mb();
	ret = __cmpxchg(ptr, old, new, size);
	smp_mb();

	return ret;
}
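/*
 * cmpxchg() goes through __cmpxchg_mb() and is therefore fully ordered
 * (smp_mb() on either side of the exclusive-access loop), while
 * cmpxchg_local() below calls __cmpxchg() directly and only guarantees
 * atomicity, not ordering.
 */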
#define cmpxchg(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg_mb((ptr), (unsigned long)(o), (unsigned long)(n), \
			     sizeof(*(ptr))); \
	__ret; \
})
#define cmpxchg_local(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg((ptr), (unsigned long)(o), \
			  (unsigned long)(n), sizeof(*(ptr))); \
	__ret; \
})
#define cmpxchg64(ptr,o,n)		cmpxchg((ptr),(o),(n))
#define cmpxchg64_local(ptr,o,n)	cmpxchg_local((ptr),(o),(n))

#define cmpxchg64_relaxed(ptr,o,n)	cmpxchg_local((ptr),(o),(n))
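/*
 * Usage sketch (illustrative only, not part of the original header; the
 * variable names are hypothetical): a lock-free increment built on
 * cmpxchg().
 *
 *	u32 old, new;
 *	do {
 *		old = ACCESS_ONCE(counter);
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 *
 * cmpxchg() returns the value it observed at the location; the store only
 * takes effect when that value matched 'old'.
 */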
#endif	/* __ASM_CMPXCHG_H */