/*
 * Based on arch/arm/include/asm/cmpxchg.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>

#include <asm/barrier.h>
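
/*
 * The helpers below are built on the ARMv8 exclusive load/store instructions:
 * __xchg uses the acquire/release exclusives (LDAXR/STLXR) and retries until
 * the exclusive store succeeds, while __cmpxchg uses the unordered exclusives
 * (LDXR/STXR) and leaves any required full barriers to __cmpxchg_mb below.
 */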

static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long ret, tmp;

	switch (size) {
	case 1:
		asm volatile("// __xchg1\n"
		"1: ldaxrb %w0, [%3]\n"
		"   stlxrb %w1, %w2, [%3]\n"
		"   cbnz %w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
	case 2:
		asm volatile("// __xchg2\n"
		"1: ldaxrh %w0, [%3]\n"
		"   stlxrh %w1, %w2, [%3]\n"
		"   cbnz %w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
	case 4:
		asm volatile("// __xchg4\n"
		"1: ldaxr %w0, [%3]\n"
		"   stlxr %w1, %w2, [%3]\n"
		"   cbnz %w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
	case 8:
		asm volatile("// __xchg8\n"
		"1: ldaxr %0, [%3]\n"
		"   stlxr %w1, %2, [%3]\n"
		"   cbnz %w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}

#define xchg(ptr,x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
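
/*
 * Illustrative use only (the variable below is hypothetical):
 *
 *	unsigned long prev = xchg(&flag, 1UL);
 *
 * atomically stores 1 into flag and returns the value it held beforehand,
 * with acquire/release ordering supplied by LDAXR/STLXR.
 */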

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long oldval = 0, res;

	switch (size) {
	case 1:
		do {
			asm volatile("// __cmpxchg1\n"
			"   ldxrb %w1, [%2]\n"
			"   mov %w0, #0\n"
			"   cmp %w1, %w3\n"
			"   b.ne 1f\n"
			"   stxrb %w0, %w4, [%2]\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval)
				: "r" (ptr), "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 2:
		do {
			asm volatile("// __cmpxchg2\n"
			"   ldxrh %w1, [%2]\n"
			"   mov %w0, #0\n"
			"   cmp %w1, %w3\n"
			"   b.ne 1f\n"
			"   stxrh %w0, %w4, [%2]\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval)
				: "r" (ptr), "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 4:
		do {
			asm volatile("// __cmpxchg4\n"
			"   ldxr %w1, [%2]\n"
			"   mov %w0, #0\n"
			"   cmp %w1, %w3\n"
			"   b.ne 1f\n"
			"   stxr %w0, %w4, [%2]\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval)
				: "r" (ptr), "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 8:
		do {
			asm volatile("// __cmpxchg8\n"
			"   ldxr %1, [%2]\n"
			"   mov %w0, #0\n"
			"   cmp %1, %3\n"
			"   b.ne 1f\n"
			"   stxr %w0, %4, [%2]\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval)
				: "r" (ptr), "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	default:
		BUILD_BUG();
	}

	return oldval;
}
static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
	unsigned long ret;

	smp_mb();
	ret = __cmpxchg(ptr, old, new, size);
	smp_mb();

	return ret;
}

#define cmpxchg(ptr,o,n) \
	((__typeof__(*(ptr)))__cmpxchg_mb((ptr), \
					  (unsigned long)(o), \
					  (unsigned long)(n), \
					  sizeof(*(ptr))))

#define cmpxchg_local(ptr,o,n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), \
				       (unsigned long)(o), \
				       (unsigned long)(n), \
				       sizeof(*(ptr))))
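
/*
 * Illustrative use only (the variable below is hypothetical):
 *
 *	old = cmpxchg(&counter, 0, 1);
 *
 * cmpxchg() returns the value found in *ptr; the new value was stored only
 * if that returned value equals the expected one (here, 0). cmpxchg_local()
 * performs the same update without the full memory barriers.
 */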

#endif /* __ASM_CMPXCHG_H */