/* xchg and cmpxchg operation emulation for FR-V
 *
 * For an explanation of how atomic ops work in this arch, see:
 * Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
14 #ifndef _ASM_CMPXCHG_H
15 #define _ASM_CMPXCHG_H
17 #include <linux/types.h>
/*****************************************************************************/
/*
 * exchange value with memory
 */
/* Exchange a 64-bit value with memory; always implemented out of line. */
extern uint64_t __xchg_64(uint64_t i, volatile void *v);
#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

/* Inline version: the FR-V "swap" instruction atomically exchanges a
 * 32-bit register with memory.  Only 4-byte operands are supported;
 * any other size traps at runtime via the "break" instruction.
 */
#define xchg(ptr, x)						\
({								\
	__typeof__(ptr) __xg_ptr = (ptr);			\
	__typeof__(*(ptr)) __xg_orig;				\
								\
	switch (sizeof(__xg_orig)) {				\
	case 4:							\
		asm volatile(					\
			"swap%I0 %M0,%1"			\
			: "+m"(*__xg_ptr), "=r"(__xg_orig)	\
			: "1"(x)				\
			: "memory"				\
			);					\
		break;						\
								\
	default:						\
		__xg_orig = (__typeof__(__xg_orig))0;		\
		asm volatile("break");				\
		break;						\
	}							\
								\
	__xg_orig;						\
})

#else

/* Out-of-line version: defer to the library routine __xchg_32(). */
extern uint32_t __xchg_32(uint32_t i, volatile void *v);

#define xchg(ptr, x)						\
({								\
	__typeof__(ptr) __xg_ptr = (ptr);			\
	__typeof__(*(ptr)) __xg_orig;				\
								\
	switch (sizeof(__xg_orig)) {				\
	case 4: __xg_orig = (__typeof__(*(ptr))) __xchg_32((uint32_t) x, __xg_ptr); break; \
	default:						\
		__xg_orig = (__typeof__(__xg_orig))0;		\
		asm volatile("break");				\
		break;						\
	}							\
								\
	__xg_orig;						\
})

#endif
/*****************************************************************************/
/*
 * compare and conditionally exchange value with memory
 * - if (*ptr == test) then orig = *ptr; *ptr = new;
 * - if (*ptr != test) then orig = *ptr;
 */
/* 64-bit compare-and-exchange; always implemented out of line. */
extern uint64_t __cmpxchg_64(uint64_t test, uint64_t new, volatile uint64_t *v);
#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

/* Inline version: retry loop built on the FR-V conditional store
 * (cst.p) - reload the old value, compare with the test value, and
 * conditionally store the new value, looping until the store lands.
 * Only 4-byte operands are supported; other sizes trap via "break".
 */
#define cmpxchg(ptr, test, new)						\
({									\
	__typeof__(ptr) __xg_ptr = (ptr);				\
	__typeof__(*(ptr)) __xg_orig, __xg_tmp;				\
	__typeof__(*(ptr)) __xg_test = (test);				\
	__typeof__(*(ptr)) __xg_new = (new);				\
									\
	switch (sizeof(__xg_orig)) {					\
	case 4:								\
		asm volatile(						\
			"0:					\n"	\
			"	orcc	gr0,gr0,gr0,icc3	\n"	\
			"	ckeq	icc3,cc7		\n"	\
			"	ld.p	%M0,%1			\n"	\
			"	orcr	cc7,cc7,cc3		\n"	\
			"	sub%I4cc %1,%4,%2,icc0		\n"	\
			"	bne	icc0,#0,1f		\n"	\
			"	cst.p	%3,%M0		,cc3,#1	\n"	\
			"	corcc	gr29,gr29,gr0	,cc3,#1	\n"	\
			"	beq	icc3,#0,0b		\n"	\
			"1:					\n"	\
			: "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp) \
			: "r"(__xg_new), "NPr"(__xg_test)		\
			: "memory", "cc7", "cc3", "icc3", "icc0"	\
			);						\
		break;							\
									\
	default:							\
		__xg_orig = (__typeof__(__xg_orig))0;			\
		asm volatile("break");					\
		break;							\
	}								\
									\
	__xg_orig;							\
})

#else

/* Out-of-line version: defer to the library routine __cmpxchg_32(). */
extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);

#define cmpxchg(ptr, test, new)						\
({									\
	__typeof__(ptr) __xg_ptr = (ptr);				\
	__typeof__(*(ptr)) __xg_orig;					\
	__typeof__(*(ptr)) __xg_test = (test);				\
	__typeof__(*(ptr)) __xg_new = (new);				\
									\
	switch (sizeof(__xg_orig)) {					\
	case 4: __xg_orig = (__force __typeof__(*ptr))			\
			__cmpxchg_32((__force uint32_t *)__xg_ptr,	\
				     (__force uint32_t)__xg_test,	\
				     (__force uint32_t)__xg_new); break; \
	default:							\
		__xg_orig = (__typeof__(__xg_orig))0;			\
		asm volatile("break");					\
		break;							\
	}								\
									\
	__xg_orig;							\
})

#endif
145 #include <asm-generic/cmpxchg-local.h>
/*
 * Local cmpxchg: atomic only with respect to the current CPU.
 * 32-bit operands use the fully atomic cmpxchg() above; any other
 * size falls back to the generic fallback __cmpxchg_local_generic()
 * from <asm-generic/cmpxchg-local.h>.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 4:
		return cmpxchg((unsigned long *)ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;	/* not reached */
}
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
/* See __cmpxchg_local() above for the size dispatch. */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
					     (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
170 #endif /* _ASM_CMPXCHG_H */