/* lib/simple-atomic.c — from the gnulib portability library (gnulib.git). */
/* Simple atomic operations for multithreading.
   Copyright (C) 2020-2025 Free Software Foundation, Inc.

   This file is free software: you can redistribute it and/or modify
   it under the terms of the GNU Lesser General Public License as
   published by the Free Software Foundation; either version 2.1 of the
   License, or (at your option) any later version.

   This file is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public License
   along with this program.  If not, see <https://www.gnu.org/licenses/>.  */

/* Written by Bruno Haible <bruno@clisp.org>, 2021.  */
19 #include <config.h>
21 /* Specification. */
22 #include "simple-atomic.h"
24 #if 0x590 <= __SUNPRO_C && __STDC__
25 # define asm __asm
26 #endif
28 #if defined _WIN32 && ! defined __CYGWIN__
29 /* Native Windows. */
31 # include <windows.h>
33 void
34 memory_barrier (void)
36 /* MemoryBarrier
37 <https://docs.microsoft.com/en-us/windows/win32/api/winnt/nf-winnt-memorybarrier> */
38 MemoryBarrier ();
41 unsigned int
42 atomic_compare_and_swap (unsigned int volatile *vp,
43 unsigned int cmp,
44 unsigned int newval)
46 /* InterlockedCompareExchange
47 <https://docs.microsoft.com/en-us/windows/win32/api/winnt/nf-winnt-interlockedcompareexchange> */
48 return InterlockedCompareExchange ((LONG volatile *) vp,
49 (LONG) newval, (LONG) cmp);
52 uintptr_t
53 atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
54 uintptr_t cmp,
55 uintptr_t newval)
57 /* InterlockedCompareExchangePointer
58 <https://docs.microsoft.com/en-us/windows/win32/api/winnt/nf-winnt-interlockedcompareexchangepointer> */
59 return (uintptr_t)
60 InterlockedCompareExchangePointer ((void * volatile *) vp,
61 (void *) newval, (void *) cmp);
64 #elif HAVE_PTHREAD_H
65 /* Some other platform that supports multi-threading.
67 We don't use the C11 <stdatomic.h> (available in GCC >= 4.9) because it would
68 require to link with -latomic. */
70 # if (((__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)) \
71 || __clang_major__ >= 3) \
72 && HAVE_ATOMIC_COMPARE_AND_SWAP_GCC41)
73 /* Use GCC built-ins (available on many platforms with GCC >= 4.1 or
74 clang >= 3.0).
75 Documentation:
76 <https://gcc.gnu.org/onlinedocs/gcc-4.1.2/gcc/Atomic-Builtins.html> */
78 void
79 memory_barrier (void)
81 __sync_synchronize ();
84 unsigned int
85 atomic_compare_and_swap (unsigned int volatile *vp,
86 unsigned int cmp,
87 unsigned int newval)
89 return __sync_val_compare_and_swap (vp, cmp, newval);
92 uintptr_t
93 atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
94 uintptr_t cmp,
95 uintptr_t newval)
97 return __sync_val_compare_and_swap (vp, cmp, newval);
100 # elif defined _AIX
101 /* AIX */
102 /* For older versions of GCC or xlc, use inline assembly.
103 __compare_and_swap and __compare_and_swaplp are not sufficient here. */
105 void
106 memory_barrier (void)
108 asm volatile ("sync");
111 unsigned int
112 atomic_compare_and_swap (unsigned int volatile *vp,
113 unsigned int cmp,
114 unsigned int newval)
116 asm volatile ("sync");
118 unsigned int oldval;
119 asm volatile (
120 # if defined __GNUC__ || defined __clang__
121 "1: lwarx %0,0,%1\n"
122 " cmpw 0,%0,%2\n"
123 " bne 0,2f\n"
124 " stwcx. %3,0,%1\n"
125 " bne 0,1b\n"
126 "2:"
127 # else /* another label syntax */
128 ".L01: lwarx %0,0,%1\n"
129 " cmpw 0,%0,%2\n"
130 " bne 0,.L02\n"
131 " stwcx. %3,0,%1\n"
132 " bne 0,.L01\n"
133 ".L02:"
134 # endif
135 : "=&r" (oldval)
136 : "r" (vp), "r" (cmp), "r" (newval)
137 : "cr0");
139 asm volatile ("isync");
140 return oldval;
143 uintptr_t
144 atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
145 uintptr_t cmp,
146 uintptr_t newval)
148 asm volatile ("sync");
150 uintptr_t oldval;
151 asm volatile (
152 # if defined __GNUC__ || defined __clang__
153 # if defined __powerpc64__ || defined __LP64__
154 "1: ldarx %0,0,%1\n"
155 " cmpd 0,%0,%2\n"
156 " bne 0,2f\n"
157 " stdcx. %3,0,%1\n"
158 " bne 0,1b\n"
159 "2:"
160 # else
161 "1: lwarx %0,0,%1\n"
162 " cmpw 0,%0,%2\n"
163 " bne 0,2f\n"
164 " stwcx. %3,0,%1\n"
165 " bne 0,1b\n"
166 "2:"
167 # endif
168 # else /* another label syntax */
169 # if defined __powerpc64__ || defined __LP64__
170 ".L01: ldarx %0,0,%1\n"
171 " cmpd 0,%0,%2\n"
172 " bne 0,.L02\n"
173 " stdcx. %3,0,%1\n"
174 " bne 0,.L01\n"
175 ".L02:"
176 # else
177 ".L01: lwarx %0,0,%1\n"
178 " cmpw 0,%0,%2\n"
179 " bne 0,.L02\n"
180 " stwcx. %3,0,%1\n"
181 " bne 0,.L01\n"
182 ".L02:"
183 # endif
184 # endif
185 : "=&r" (oldval)
186 : "r" (vp), "r" (cmp), "r" (newval)
187 : "cr0");
189 asm volatile ("isync");
190 return oldval;
193 # elif ((defined __GNUC__ || defined __clang__ || defined __SUNPRO_C) && (defined __sparc || defined __i386 || defined __x86_64__)) || (defined __TINYC__ && (defined __i386 || defined __x86_64__))
194 /* For older versions of GCC or clang, use inline assembly.
195 GCC, clang, and the Oracle Studio C 12 compiler understand GCC's extended
196 asm syntax, but the plain Oracle Studio C 11 compiler understands only
197 simple asm. */
199 void
200 memory_barrier (void)
202 # if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
203 # if defined __i386 || defined __x86_64__
204 # if defined __TINYC__ && defined __i386
205 /* Cannot use the SSE instruction "mfence" with this compiler. */
206 asm volatile ("lock orl $0,(%esp)");
207 # else
208 asm volatile ("mfence");
209 # endif
210 # endif
211 # if defined __sparc
212 asm volatile ("membar 2");
213 # endif
214 # else
215 # if defined __i386 || defined __x86_64__
216 asm ("mfence");
217 # endif
218 # if defined __sparc
219 asm ("membar 2");
220 # endif
221 # endif
224 unsigned int
225 atomic_compare_and_swap (unsigned int volatile *vp,
226 unsigned int cmp,
227 unsigned int newval)
229 # if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
230 unsigned int oldval;
231 # if defined __i386 || defined __x86_64__
232 asm volatile (" lock\n cmpxchgl %3,(%1)"
233 : "=a" (oldval) : "r" (vp), "a" (cmp), "r" (newval) : "memory");
234 # endif
235 # if defined __sparc
236 asm volatile (" cas [%1],%2,%3\n"
237 " mov %3,%0"
238 : "=r" (oldval) : "r" (vp), "r" (cmp), "r" (newval) : "memory");
239 # endif
240 return oldval;
241 # else /* __SUNPRO_C */
242 # if defined __x86_64__
243 asm (" movl %esi,%eax\n"
244 " lock\n cmpxchgl %edx,(%rdi)");
245 # elif defined __i386
246 asm (" movl 16(%ebp),%ecx\n"
247 " movl 12(%ebp),%eax\n"
248 " movl 8(%ebp),%edx\n"
249 " lock\n cmpxchgl %ecx,(%edx)");
250 # endif
251 # if defined __sparc
252 asm (" cas [%i0],%i1,%i2\n"
253 " mov %i2,%i0");
254 # endif
255 # endif
258 uintptr_t
259 atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
260 uintptr_t cmp,
261 uintptr_t newval)
263 # if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
264 uintptr_t oldval;
265 # if defined __x86_64__
266 asm volatile (" lock\n cmpxchgq %3,(%1)"
267 : "=a" (oldval) : "r" (vp), "a" (cmp), "r" (newval) : "memory");
268 # elif defined __i386
269 asm volatile (" lock\n cmpxchgl %3,(%1)"
270 : "=a" (oldval) : "r" (vp), "a" (cmp), "r" (newval) : "memory");
271 # endif
272 # if defined __sparc && (defined __sparcv9 || defined __arch64__)
273 asm volatile (" casx [%1],%2,%3\n"
274 " mov %3,%0"
275 : "=r" (oldval) : "r" (vp), "r" (cmp), "r" (newval) : "memory");
276 # elif defined __sparc
277 asm volatile (" cas [%1],%2,%3\n"
278 " mov %3,%0"
279 : "=r" (oldval) : "r" (vp), "r" (cmp), "r" (newval) : "memory");
280 # endif
281 return oldval;
282 # else /* __SUNPRO_C */
283 # if defined __x86_64__
284 asm (" movq %rsi,%rax\n"
285 " lock\n cmpxchgq %rdx,(%rdi)");
286 # elif defined __i386
287 asm (" movl 16(%ebp),%ecx\n"
288 " movl 12(%ebp),%eax\n"
289 " movl 8(%ebp),%edx\n"
290 " lock\n cmpxchgl %ecx,(%edx)");
291 # endif
292 # if defined __sparc && (defined __sparcv9 || defined __arch64__)
293 asm (" casx [%i0],%i1,%i2\n"
294 " mov %i2,%i0");
295 # elif defined __sparc
296 asm (" cas [%i0],%i1,%i2\n"
297 " mov %i2,%i0");
298 # endif
299 # endif
302 # else
303 /* Fallback code. It has some race conditions. The unit test will fail. */
305 void
306 memory_barrier (void)
310 unsigned int
311 atomic_compare_and_swap (unsigned int volatile *vp,
312 unsigned int cmp,
313 unsigned int newval)
315 unsigned int oldval = *vp;
316 if (oldval == cmp)
317 *vp = newval;
318 return oldval;
321 uintptr_t
322 atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
323 uintptr_t cmp,
324 uintptr_t newval)
326 uintptr_t oldval = *vp;
327 if (oldval == cmp)
328 *vp = newval;
329 return oldval;
332 # endif
334 #else
335 /* A platform that does not support multi-threading. */
337 void
338 memory_barrier (void)
342 unsigned int
343 atomic_compare_and_swap (unsigned int volatile *vp,
344 unsigned int cmp,
345 unsigned int newval)
347 unsigned int oldval = *vp;
348 if (oldval == cmp)
349 *vp = newval;
350 return oldval;
353 uintptr_t
354 atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
355 uintptr_t cmp,
356 uintptr_t newval)
358 uintptr_t oldval = *vp;
359 if (oldval == cmp)
360 *vp = newval;
361 return oldval;
364 #endif