/*
 * Copyright © 2011 Ryan Lortie
 *
 * This library is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2 of the
 * licence, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * Author: Ryan Lortie <desrt@desrt.ca>
 */

#ifndef __G_ATOMIC_H__
#define __G_ATOMIC_H__

#if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
#error "Only <glib.h> can be included directly."
#endif

#include <glib/gtypes.h>

G_BEGIN_DECLS

gint                    g_atomic_int_get                      (const volatile gint *atomic);
void                    g_atomic_int_set                      (volatile gint  *atomic,
                                                               gint            newval);
void                    g_atomic_int_inc                      (volatile gint  *atomic);
gboolean                g_atomic_int_dec_and_test             (volatile gint  *atomic);
gboolean                g_atomic_int_compare_and_exchange     (volatile gint  *atomic,
                                                               gint            oldval,
                                                               gint            newval);
gint                    g_atomic_int_add                      (volatile gint  *atomic,
                                                               gint            val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_and                      (volatile guint *atomic,
                                                               guint           val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_or                       (volatile guint *atomic,
                                                               guint           val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_xor                      (volatile guint *atomic,
                                                               guint           val);

gpointer                g_atomic_pointer_get                  (const volatile void *atomic);
void                    g_atomic_pointer_set                  (volatile void  *atomic,
                                                               gpointer        newval);
gboolean                g_atomic_pointer_compare_and_exchange (volatile void  *atomic,
                                                               gpointer        oldval,
                                                               gpointer        newval);
gssize                  g_atomic_pointer_add                  (volatile void  *atomic,
                                                               gssize          val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_and                  (volatile void  *atomic,
                                                               gsize           val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_or                   (volatile void  *atomic,
                                                               gsize           val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_xor                  (volatile void  *atomic,
                                                               gsize           val);

GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
gint                    g_atomic_int_exchange_and_add         (volatile gint  *atomic,
                                                               gint            val);

G_END_DECLS
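
/* Usage sketch (illustrative only; the Shared type and the
 * create_instance()/destroy_instance() helpers below are hypothetical
 * names, not GLib API).  The integer operations are typically used for
 * reference counting, and the pointer compare-and-exchange for one-time,
 * lock-free publication of a value; if the exchange fails, another
 * thread published first and the local instance is discarded:
 *
 *   typedef struct { gint ref_count; } Shared;
 *
 *   static void
 *   shared_ref (Shared *s)
 *   {
 *     g_atomic_int_inc (&s->ref_count);
 *   }
 *
 *   static void
 *   shared_unref (Shared *s)
 *   {
 *     if (g_atomic_int_dec_and_test (&s->ref_count))
 *       g_free (s);
 *   }
 *
 *   static volatile gpointer singleton = NULL;
 *
 *   gpointer mine = create_instance ();
 *   if (!g_atomic_pointer_compare_and_exchange (&singleton, NULL, mine))
 *     destroy_instance (mine);
 */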

#if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)

/* We prefer the new C11-style atomic extension of GCC if available */
#if defined(__ATOMIC_SEQ_CST) && !defined(__clang__)

/* This assumes sizeof(int) is 4: gatomic.c statically
 * asserts that (using G_STATIC_ASSERT at top-level in a header was
 * problematic, see #730932) */
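
/* A note on the idioms used in the macros below: the
 * "(void) (0 ? *(atomic) ^ *(atomic) : 0)" expression is never evaluated
 * at runtime; it only forces a compile-time error when the target of
 * @atomic (or the value argument) is not an integral type, since XOR is
 * invalid on pointers and floats.  The pointer variants use a cast to
 * gpointer for the same purpose, and the G_STATIC_ASSERT checks that the
 * pointed-to type has the expected size. */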

#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 0); \
    (gint) __atomic_load_4 ((atomic), __ATOMIC_SEQ_CST); \
  }))

#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 0); \
    __atomic_store_4 ((atomic), (newval), __ATOMIC_SEQ_CST); \
  }))

#if GLIB_SIZEOF_VOID_P == 8

#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (gpointer) __atomic_load_8 ((atomic), __ATOMIC_SEQ_CST); \
  }))

#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    __atomic_store_8 ((atomic), (gsize) (newval), __ATOMIC_SEQ_CST); \
  }))

#else /* GLIB_SIZEOF_VOID_P == 8 */

/* This assumes that if sizeof(void *) is not 8, then it is 4:
 * gatomic.c statically asserts that (using G_STATIC_ASSERT
 * at top-level in a header was problematic, see #730932) */

#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (gpointer) __atomic_load_4 ((atomic), __ATOMIC_SEQ_CST); \
  }))

#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    __atomic_store_4 ((atomic), (gsize) (newval), __ATOMIC_SEQ_CST); \
  }))

#endif /* GLIB_SIZEOF_VOID_P == 8 */

#else /* defined(__ATOMIC_SEQ_CST) */

#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 0); \
    __sync_synchronize (); \
    (gint) *(atomic); \
  }))

#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 0); \
    *(atomic) = (newval); \
    __sync_synchronize (); \
  }))

#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    __sync_synchronize (); \
    (gpointer) *(atomic); \
  }))

#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    *(atomic) = (__typeof__ (*(atomic))) (gsize) (newval); \
    __sync_synchronize (); \
  }))

#endif /* !defined(__ATOMIC_SEQ_CST) */
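
/* The operations below are shared by both branches above; they map onto
 * GCC's legacy __sync_* builtins, which GCC documents as acting as full
 * memory barriers. */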

#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 0); \
    (void) __sync_fetch_and_add ((atomic), 1); \
  }))

#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 0); \
    __sync_fetch_and_sub ((atomic), 1) == 1; \
  }))

#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 0); \
    (gboolean) __sync_bool_compare_and_swap ((atomic), (oldval), (newval)); \
  }))

#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 0); \
    (gint) __sync_fetch_and_add ((atomic), (val)); \
  }))

#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 0); \
    (guint) __sync_fetch_and_and ((atomic), (val)); \
  }))

#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 0); \
    (guint) __sync_fetch_and_or ((atomic), (val)); \
  }))

#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 0); \
    (guint) __sync_fetch_and_xor ((atomic), (val)); \
  }))

#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    (gboolean) __sync_bool_compare_and_swap ((atomic), (oldval), (newval)); \
  }))

#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    (void) (0 ? (val) ^ (val) : 0); \
    (gssize) __sync_fetch_and_add ((atomic), (val)); \
  }))

#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    (void) (0 ? (val) ^ (val) : 0); \
    (gsize) __sync_fetch_and_and ((atomic), (val)); \
  }))

#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    (void) (0 ? (val) ^ (val) : 0); \
    (gsize) __sync_fetch_and_or ((atomic), (val)); \
  }))

#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : 0); \
    (void) (0 ? (val) ^ (val) : 0); \
    (gsize) __sync_fetch_and_xor ((atomic), (val)); \
  }))

#else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
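
/* In this configuration the operations cannot be implemented lock-free,
 * so each macro simply forwards to the extern function declared above
 * (a function-like macro does not re-expand inside its own expansion,
 * so the calls below reach the real functions).  The casts let callers
 * pass any pointer whose target has the right size, mirroring what the
 * lock-free macros accept. */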

#define g_atomic_int_get(atomic) \
  (g_atomic_int_get ((gint *) (atomic)))
#define g_atomic_int_set(atomic, newval) \
  (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
#define g_atomic_int_add(atomic, val) \
  (g_atomic_int_add ((gint *) (atomic), (val)))
#define g_atomic_int_and(atomic, val) \
  (g_atomic_int_and ((guint *) (atomic), (val)))
#define g_atomic_int_or(atomic, val) \
  (g_atomic_int_or ((guint *) (atomic), (val)))
#define g_atomic_int_xor(atomic, val) \
  (g_atomic_int_xor ((guint *) (atomic), (val)))
#define g_atomic_int_inc(atomic) \
  (g_atomic_int_inc ((gint *) (atomic)))
#define g_atomic_int_dec_and_test(atomic) \
  (g_atomic_int_dec_and_test ((gint *) (atomic)))

#define g_atomic_pointer_get(atomic) \
  (g_atomic_pointer_get (atomic))
#define g_atomic_pointer_set(atomic, newval) \
  (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
#define g_atomic_pointer_add(atomic, val) \
  (g_atomic_pointer_add ((atomic), (gssize) (val)))
#define g_atomic_pointer_and(atomic, val) \
  (g_atomic_pointer_and ((atomic), (gsize) (val)))
#define g_atomic_pointer_or(atomic, val) \
  (g_atomic_pointer_or ((atomic), (gsize) (val)))
#define g_atomic_pointer_xor(atomic, val) \
  (g_atomic_pointer_xor ((atomic), (gsize) (val)))

#endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

#endif /* __G_ATOMIC_H__ */