mm: fix exec activate_mm vs TLB shootdown and lazy tlb switching race
[linux/fpc-iii.git] / arch / m32r / include / asm / cmpxchg.h
blob1ccdce5ff0ac5e4dbc9989a97befdd05f17c4142
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_M32R_CMPXCHG_H
3 #define _ASM_M32R_CMPXCHG_H
5 /*
6 * M32R version:
7 * Copyright (C) 2001, 2002 Hitoshi Yamamoto
8 * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
9 */
11 #include <linux/irqflags.h>
12 #include <asm/assembler.h>
13 #include <asm/dcache_clear.h>
15 extern void __xchg_called_with_bad_pointer(void);
17 static __always_inline unsigned long
18 __xchg(unsigned long x, volatile void *ptr, int size)
20 unsigned long flags;
21 unsigned long tmp = 0;
23 local_irq_save(flags);
25 switch (size) {
26 #ifndef CONFIG_SMP
27 case 1:
28 __asm__ __volatile__ (
29 "ldb %0, @%2 \n\t"
30 "stb %1, @%2 \n\t"
31 : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
32 break;
33 case 2:
34 __asm__ __volatile__ (
35 "ldh %0, @%2 \n\t"
36 "sth %1, @%2 \n\t"
37 : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
38 break;
39 case 4:
40 __asm__ __volatile__ (
41 "ld %0, @%2 \n\t"
42 "st %1, @%2 \n\t"
43 : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
44 break;
45 #else /* CONFIG_SMP */
46 case 4:
47 __asm__ __volatile__ (
48 DCACHE_CLEAR("%0", "r4", "%2")
49 "lock %0, @%2; \n\t"
50 "unlock %1, @%2; \n\t"
51 : "=&r" (tmp) : "r" (x), "r" (ptr)
52 : "memory"
53 #ifdef CONFIG_CHIP_M32700_TS1
54 , "r4"
55 #endif /* CONFIG_CHIP_M32700_TS1 */
57 break;
58 #endif /* CONFIG_SMP */
59 default:
60 __xchg_called_with_bad_pointer();
63 local_irq_restore(flags);
65 return (tmp);
/* Type-preserving front end for __xchg(); evaluates to the old *ptr. */
#define xchg(ptr, x) ({							\
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr),		\
			sizeof(*(ptr))));				\
})
73 static __always_inline unsigned long
74 __xchg_local(unsigned long x, volatile void *ptr, int size)
76 unsigned long flags;
77 unsigned long tmp = 0;
79 local_irq_save(flags);
81 switch (size) {
82 case 1:
83 __asm__ __volatile__ (
84 "ldb %0, @%2 \n\t"
85 "stb %1, @%2 \n\t"
86 : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
87 break;
88 case 2:
89 __asm__ __volatile__ (
90 "ldh %0, @%2 \n\t"
91 "sth %1, @%2 \n\t"
92 : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
93 break;
94 case 4:
95 __asm__ __volatile__ (
96 "ld %0, @%2 \n\t"
97 "st %1, @%2 \n\t"
98 : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
99 break;
100 default:
101 __xchg_called_with_bad_pointer();
104 local_irq_restore(flags);
106 return (tmp);
/* Type-preserving front end for __xchg_local(). */
#define xchg_local(ptr, x)						\
	((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr),	\
			sizeof(*(ptr))))
113 static inline unsigned long
114 __cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
116 unsigned long flags;
117 unsigned int retval;
119 local_irq_save(flags);
120 __asm__ __volatile__ (
121 DCACHE_CLEAR("%0", "r4", "%1")
122 M32R_LOCK" %0, @%1; \n"
123 " bne %0, %2, 1f; \n"
124 M32R_UNLOCK" %3, @%1; \n"
125 " bra 2f; \n"
126 " .fillinsn \n"
127 "1:"
128 M32R_UNLOCK" %0, @%1; \n"
129 " .fillinsn \n"
130 "2:"
131 : "=&r" (retval)
132 : "r" (p), "r" (old), "r" (new)
133 : "cbit", "memory"
134 #ifdef CONFIG_CHIP_M32700_TS1
135 , "r4"
136 #endif /* CONFIG_CHIP_M32700_TS1 */
138 local_irq_restore(flags);
140 return retval;
143 static inline unsigned long
144 __cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
145 unsigned int new)
147 unsigned long flags;
148 unsigned int retval;
150 local_irq_save(flags);
151 __asm__ __volatile__ (
152 DCACHE_CLEAR("%0", "r4", "%1")
153 "ld %0, @%1; \n"
154 " bne %0, %2, 1f; \n"
155 "st %3, @%1; \n"
156 " bra 2f; \n"
157 " .fillinsn \n"
158 "1:"
159 "st %0, @%1; \n"
160 " .fillinsn \n"
161 "2:"
162 : "=&r" (retval)
163 : "r" (p), "r" (old), "r" (new)
164 : "cbit", "memory"
165 #ifdef CONFIG_CHIP_M32700_TS1
166 , "r4"
167 #endif /* CONFIG_CHIP_M32700_TS1 */
169 local_irq_restore(flags);
171 return retval;
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

/*
 * Size-dispatching cmpxchg: only 4-byte operands are supported on m32r;
 * any other size resolves to the undefined function above and fails at
 * link time.
 */
static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#if 0	/* we don't have __cmpxchg_u64 */
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif /* 0 */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}
/* Type-preserving front end for __cmpxchg(); evaluates to the old *ptr. */
#define cmpxchg(ptr, o, n) ({						\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr), (unsigned long)(o),			\
			(unsigned long)(n),				\
			sizeof(*(ptr))));				\
})
200 #include <asm-generic/cmpxchg-local.h>
/*
 * CPU-local cmpxchg dispatcher: the native 4-byte implementation for
 * size 4, the generic irq-off fallback from asm-generic for the rest.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
				      unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_local_u32(ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
/* CPU-local variants; cmpxchg64_local always uses the generic fallback. */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
225 #endif /* _ASM_M32R_CMPXCHG_H */