/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
#if defined(__BIONIC__) || defined(__minix__) || defined(UNUSUAL_THREAD)
#if defined(THREAD_NONE)
#define thread_volatile
#else
#define thread_volatile volatile
#endif
#if defined(THREAD_NONE)
#define tls_verify_type_common_(type) \
	ajla_assert(sizeof(type) <= sizeof(void *), (file_line, "tls_verify_type_common_: too big type: %d > %d", (int)sizeof(type), (int)sizeof(void *)))
#if defined(HAVE___THREAD)
#define tls_verify_type_(type, variable) ((void)(&variable - (type *)&variable), tls_verify_type_common_(type))
#define tls_decl(type, variable) HAVE___THREAD type variable = (type)0
#define tls_decl_extern(type, variable) extern HAVE___THREAD type variable
#define tls_get_(variable) (*variable)
#define tls_get__nocheck tls_get_
#define tls_get_cast(type)
#define tls_set_(variable, value) do { (*variable = (value)); } while (0)
#define tls_set__nocheck tls_set_
#else
#define tls_verify_type_(type, variable) tls_verify_type_common_(type)
void tls_destructor_call(void);
#if defined(OS_WIN32) || defined(OS_CYGWIN)
#define thread_concurrency_win32_				\
	{							\
		SYSTEM_INFO info;				\
		GetSystemInfo(&info);				\
		if (info.dwNumberOfProcessors > 0)		\
			return info.dwNumberOfProcessors;	\
		warning("GetSystemInfo returned zero processors");\
	}
#if defined(THREAD_OS2)

#if defined(HAVE_SYS_BUILTIN_H) && defined(HAVE_SYS_FMUTEX_H) && defined(HAVE__FMUTEX_CREATE) && !defined(UNUSUAL_THREAD)
#define OS2_USE_FMUTEX
#include <sys/builtin.h>
#include <sys/fmutex.h>
typedef _fmutex mutex_t;
	struct list wait_list;
typedef struct os2_thread *thread_t;
typedef void thread_function_t(void *arg);
#define thread_function_decl(name, content) static void name(void attr_unused *arg) { content }

extern mutex_t thread_spawn_mutex;
#if !defined(HAVE___THREAD)
typedef unsigned char os2_tls_key_t;
#define OS2_THREAD_KEY_MAX 16
#define tls_decl(type, variable) os2_tls_key_t variable
#define tls_decl_extern(type, variable) extern os2_tls_key_t variable
typedef os2_tls_key_t tls_t_;
#endif

#define rwmutex_fallback
#elif defined(THREAD_WIN32)

typedef CRITICAL_SECTION mutex_t;
typedef CRITICAL_SECTION rwmutex_t;
extern bool rwmutex_supported;
	struct list wait_list;
typedef struct win32_thread *thread_t;
typedef void thread_function_t(void *arg);
#define thread_function_decl(name, content) static void name(void attr_unused *arg) { content }

#if !defined(HAVE___THREAD)
typedef DWORD win32_tls_key_t;
#define tls_decl(type, variable) win32_tls_key_t variable
#define tls_decl_extern(type, variable) extern win32_tls_key_t variable
typedef win32_tls_key_t tls_t_;
#endif
#elif defined(THREAD_POSIX)

#ifndef UNUSUAL_SPINLOCK
typedef pthread_mutex_t mutex_t;
#else
typedef pthread_spinlock_t mutex_t;
#endif
typedef pthread_rwlock_t rwmutex_t;
#define rwmutex_supported 1
	pthread_mutex_t mutex;
typedef pthread_t thread_t;
typedef void *thread_function_t(void *arg);
#define thread_function_decl(name, content) static void *name(void attr_unused *arg) { asm_setup_thread(); { content } tls_destructor_call(); return NULL; }
#if !defined(HAVE___THREAD)
#ifdef HAVE_PTHREAD_KEY_T_ASSIGN
#define tls_decl_initializer_ = (pthread_key_t)-1 /* catch uninitialized tls's */
#else
#define tls_decl_initializer_
#endif
#define tls_decl(type, variable) pthread_key_t variable tls_decl_initializer_
#define tls_decl_extern(type, variable) extern pthread_key_t variable
typedef pthread_key_t tls_t_;
#endif
#elif defined(THREAD_NONE)

#if defined(DEBUG_OBJECT_POSSIBLE)
typedef EMPTY_TYPE mutex_t;
typedef EMPTY_TYPE rwmutex_t;
#define rwmutex_supported 0
#ifdef rwmutex_fallback
typedef mutex_t rwmutex_t;
#define rwmutex_supported 0
#define do_rwmutex_init do_mutex_init
#define do_rwmutex_done do_mutex_done
#define do_rwmutex_lock_read do_mutex_lock
#define do_rwmutex_unlock_read do_mutex_unlock
#define do_rwmutex_lock_write do_mutex_lock
#define do_rwmutex_unlock_write do_mutex_unlock
#endif
#if defined(DEBUG_OBJECT_POSSIBLE) || !defined(THREAD_NONE)
void mutex_init_position(mutex_t * argument_position);
void mutex_done_position(mutex_t * argument_position);
void attr_fastcall mutex_lock_position(mutex_t * argument_position);
bool attr_fastcall mutex_trylock_position(mutex_t * argument_position);
void attr_fastcall mutex_unlock_position(mutex_t * argument_position);
#else
static inline void mutex_init_position(mutex_t attr_unused *m argument_position) { }
static inline void mutex_done_position(mutex_t attr_unused *m argument_position) { }
static inline void mutex_lock_position(mutex_t attr_unused *m argument_position) { }
static inline bool mutex_trylock_position(mutex_t attr_unused *m argument_position) { return true; }
static inline void mutex_unlock_position(mutex_t attr_unused *m argument_position) { }
#endif
#define mutex_init(x) mutex_init_position(x pass_file_line)
#define mutex_done(x) mutex_done_position(x pass_file_line)
#define mutex_lock(x) mutex_lock_position(x pass_file_line)
#define mutex_trylock(x) mutex_trylock_position(x pass_file_line)
#define mutex_unlock(x) mutex_unlock_position(x pass_file_line)
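/*
 * Usage sketch (illustrative, not part of the original header): the mutex
 * macros above wrap the *_position functions and append source-position
 * information via pass_file_line. The names example_mutex, example_counter
 * and example_increment are hypothetical.
 *
 *	static mutex_t example_mutex;
 *	static unsigned example_counter;
 *
 *	static void example_increment(void)
 *	{
 *		mutex_lock(&example_mutex);
 *		example_counter++;
 *		mutex_unlock(&example_mutex);
 *	}
 *
 * The mutex is expected to be set up with mutex_init(&example_mutex) before
 * first use and torn down with mutex_done(&example_mutex) afterwards.
 */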
#if defined(DEBUG_OBJECT_POSSIBLE) || !defined(THREAD_NONE)
void rwmutex_init_position(rwmutex_t * argument_position);
void rwmutex_done_position(rwmutex_t * argument_position);
void attr_fastcall rwmutex_lock_read_position(rwmutex_t * argument_position);
void attr_fastcall rwmutex_unlock_read_position(rwmutex_t * argument_position);
void attr_fastcall rwmutex_lock_write_position(rwmutex_t * argument_position);
void attr_fastcall rwmutex_unlock_write_position(rwmutex_t * argument_position);
#else
static inline void rwmutex_init_position(rwmutex_t attr_unused *m argument_position) { }
static inline void rwmutex_done_position(rwmutex_t attr_unused *m argument_position) { }
static inline void rwmutex_lock_read_position(rwmutex_t attr_unused *m argument_position) { }
static inline void rwmutex_unlock_read_position(rwmutex_t attr_unused *m argument_position) { }
static inline void rwmutex_lock_write_position(rwmutex_t attr_unused *m argument_position) { }
static inline void rwmutex_unlock_write_position(rwmutex_t attr_unused *m argument_position) { }
#endif
#define rwmutex_init(x) rwmutex_init_position(x pass_file_line)
#define rwmutex_done(x) rwmutex_done_position(x pass_file_line)
#define rwmutex_lock_read(x) rwmutex_lock_read_position(x pass_file_line)
#define rwmutex_unlock_read(x) rwmutex_unlock_read_position(x pass_file_line)
#define rwmutex_lock_write(x) rwmutex_lock_write_position(x pass_file_line)
#define rwmutex_unlock_write(x) rwmutex_unlock_write_position(x pass_file_line)
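/*
 * Usage sketch (illustrative, not part of the original header): readers take
 * the rwmutex in read mode so they can run concurrently, writers take it in
 * write mode. When rwmutex_fallback is in effect, rwmutex_t is a plain mutex
 * and rwmutex_supported is 0. The names example_rwlock and example_find are
 * hypothetical.
 *
 *	static rwmutex_t example_rwlock;
 *
 *	static int example_lookup(int key)
 *	{
 *		int result;
 *		rwmutex_lock_read(&example_rwlock);
 *		result = example_find(key);	// hypothetical lookup in shared data
 *		rwmutex_unlock_read(&example_rwlock);
 *		return result;
 *	}
 */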
#if defined(DEBUG_OBJECT_POSSIBLE) || !defined(THREAD_NONE)
void cond_init_position(cond_t * argument_position);
void cond_done_position(cond_t * argument_position);
void attr_fastcall cond_lock_position(cond_t * argument_position);
void attr_fastcall cond_unlock_position(cond_t * argument_position);
void attr_fastcall cond_unlock_signal_position(cond_t * argument_position);
void attr_fastcall cond_unlock_broadcast_position(cond_t * argument_position);
void attr_fastcall cond_wait_position(cond_t * argument_position);
bool attr_fastcall cond_wait_us_position(cond_t *, uint32_t argument_position);
#else
static inline void cond_init_position(cond_t attr_unused *c argument_position) { }
static inline void cond_done_position(cond_t attr_unused *c argument_position) { }
static inline void cond_lock_position(cond_t attr_unused *c argument_position) { }
static inline void cond_unlock_position(cond_t attr_unused *c argument_position) { }
static inline void cond_unlock_signal_position(cond_t attr_unused *c argument_position) { }
static inline void cond_unlock_broadcast_position(cond_t attr_unused *c argument_position) { }
static inline void cond_wait_position(cond_t attr_unused *c argument_position) { }
static inline bool cond_wait_us_position(cond_t attr_unused *c, uint32_t attr_unused us argument_position) { return false; }
#endif
#define cond_init(x) cond_init_position(x pass_file_line)
#define cond_done(x) cond_done_position(x pass_file_line)
#define cond_lock(x) cond_lock_position(x pass_file_line)
#define cond_unlock(x) cond_unlock_position(x pass_file_line)
#define cond_unlock_signal(x) cond_unlock_signal_position(x pass_file_line)
#define cond_unlock_broadcast(x) cond_unlock_broadcast_position(x pass_file_line)
#define cond_wait(x) cond_wait_position(x pass_file_line)
#define cond_wait_us(x, y) cond_wait_us_position(x, y pass_file_line)
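/*
 * Usage sketch (illustrative, not part of the original header): the cond_*
 * API pairs a lock with a condition variable (there is no separate mutex
 * argument), so the caller locks it, re-checks its predicate in a loop around
 * cond_wait, and wakes waiters with cond_unlock_signal or
 * cond_unlock_broadcast while releasing the lock. cond_wait_us additionally
 * takes a timeout (presumably microseconds, per the _us suffix) and returns a
 * bool; the THREAD_NONE stub above simply returns false. The names
 * example_cond and example_queue_is_empty are hypothetical.
 *
 *	static cond_t example_cond;
 *
 *	static void example_consumer(void)
 *	{
 *		cond_lock(&example_cond);
 *		while (example_queue_is_empty())	// hypothetical predicate
 *			cond_wait(&example_cond);
 *		// ... remove an item from the shared queue ...
 *		cond_unlock(&example_cond);
 *	}
 *
 *	static void example_producer(void)
 *	{
 *		cond_lock(&example_cond);
 *		// ... add an item to the shared queue ...
 *		cond_unlock_signal(&example_cond);
 *	}
 */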
#define thread_needs_barriers false
static inline unsigned thread_concurrency(void) { return 1; }
#else
extern uchar_efficient_t thread_needs_barriers;
unsigned thread_concurrency(void);
bool thread_spawn_position(thread_t *, thread_function_t *, void *, thread_priority_t priority, ajla_error_t *err argument_position);
void thread_join_position(thread_t * argument_position);
#define thread_spawn(t, fn, data, priority, err) thread_spawn_position(t, fn, data, priority, err pass_file_line)
#define thread_join(t) thread_join_position(t pass_file_line)
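/*
 * Usage sketch (illustrative, not part of the original header): the thread
 * body is declared with thread_function_decl, which expands to the signature
 * required by the selected backend (and, on POSIX, runs TLS destructors when
 * the body returns). thread_spawn reports failure through its bool return
 * value and the ajla_error_t pointer. The names example_thread_body,
 * example_work and example_run are hypothetical.
 *
 *	thread_function_decl(example_thread_body,
 *		example_work(arg);	// hypothetical worker using the void * argument
 *	)
 *
 *	static void example_run(void *data, thread_priority_t prio)
 *	{
 *		thread_t thr;
 *		ajla_error_t err;
 *		if (thread_spawn(&thr, example_thread_body, data, prio, &err))
 *			thread_join(&thr);
 *	}
 */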
void tls_init__position(tls_t_ * argument_position);
void tls_done__position(tls_t_ * argument_position);
#if !defined(HAVE___THREAD)
uintptr_t attr_fastcall tls_get__position(const tls_t_ * argument_position);
void attr_fastcall tls_set__position(const tls_t_ *, uintptr_t argument_position);
uintptr_t attr_fastcall tls_get__nocheck(const tls_t_ *);
void attr_fastcall tls_set__nocheck(const tls_t_ *, uintptr_t);
#define tls_get_cast(type) (type)
#define tls_set_cast (uintptr_t)
#endif
#define tls_init_(x) tls_init__position(x pass_file_line)
#define tls_done_(x) tls_done__position(x pass_file_line)
#if !defined(HAVE___THREAD)
#define tls_get_(x) tls_get__position(x pass_file_line)
#define tls_set_(x, y) tls_set__position(x, y pass_file_line)
#endif
#define tls_init(type, variable) \
do { \
	tls_verify_type_(type, variable); \
	tls_init_(&variable); \
} while (0)

#define tls_done(type, variable) \
do { \
	tls_verify_type_(type, variable); \
	tls_done_(&variable); \
} while (0)

#define tls_get(type, variable) \
	(tls_verify_type_(type, variable), tls_get_cast(type)tls_get_(&variable))

#define tls_set(type, variable, value) \
do { \
	tls_verify_type_(type, variable); \
	tls_set_(&variable, tls_set_cast(value)); \
} while (0)

#define tls_get_nocheck(type, variable) \
	(tls_verify_type_(type, variable), tls_get_cast(type)tls_get__nocheck(&variable))

#define tls_set_nocheck(type, variable, value) \
do { \
	tls_verify_type_(type, variable); \
	tls_set__nocheck(&variable, tls_set_cast(value)); \
} while (0)
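/*
 * Usage sketch (illustrative, not part of the original header): a value kept
 * in thread-local storage must fit into a pointer (tls_verify_type_common_
 * checks this), and the same type is named in every tls_* invocation. The
 * variable example_tls_depth and the functions around it are hypothetical.
 *
 *	static tls_decl(unsigned, example_tls_depth);
 *
 *	static void example_module_init(void)
 *	{
 *		tls_init(unsigned, example_tls_depth);
 *	}
 *
 *	static void example_enter(void)
 *	{
 *		unsigned depth = tls_get(unsigned, example_tls_depth);
 *		tls_set(unsigned, example_tls_depth, depth + 1);
 *	}
 *
 *	static void example_module_done(void)
 *	{
 *		tls_done(unsigned, example_tls_depth);
 *	}
 */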
typedef EMPTY_TYPE tls_destructor_t;
typedef void tls_destructor_fn(tls_destructor_t *);
static inline void tls_destructor_position(tls_destructor_t attr_unused *destr, tls_destructor_fn attr_unused *fn argument_position) { }
#else
struct tls_destructor_s;
typedef void tls_destructor_fn(struct tls_destructor_s *);
typedef struct tls_destructor_s {
	struct tls_destructor_s *previous;
	tls_destructor_fn *fn;
} tls_destructor_t;
void tls_destructor_position(tls_destructor_t *, tls_destructor_fn * argument_position);
#endif
#define tls_destructor(dest, fn) tls_destructor_position(dest, fn pass_file_line)
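/*
 * Usage sketch (illustrative, not part of the original header): a
 * tls_destructor_t is embedded in a per-thread structure and registered
 * together with a callback; the callback recovers the enclosing structure
 * from the pointer it receives. tls_destructor_call (declared above) runs the
 * registered callbacks and is invoked by the POSIX thread_function_decl
 * wrapper when the thread body returns. struct example_per_thread and
 * example_per_thread_free are hypothetical.
 *
 *	struct example_per_thread {
 *		void *buffer;
 *		tls_destructor_t destructor;
 *	};
 *
 *	static void example_per_thread_free(tls_destructor_t *d)
 *	{
 *		struct example_per_thread *pt = (struct example_per_thread *)
 *			((char *)d - offsetof(struct example_per_thread, destructor));
 *		free(pt->buffer);
 *	}
 *
 *	// after allocating pt for the current thread:
 *	//	tls_destructor(&pt->destructor, example_per_thread_free);
 */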
/*
 * See smp_read_barrier_depends() in Linux for explanation.
 * If we don't know how to do this barrier, we don't define this macro and
 * the user must not use the lockless access model.
 */
#if defined(THREAD_NONE)
#define barrier_data_dependency() do { } while (0)
#elif defined(__alpha)
#if defined(HAVE_C11_ATOMICS) && !defined(UNUSUAL)
#define barrier_data_dependency() atomic_thread_fence(memory_order_seq_cst)
#elif defined(HAVE_SYNC_AND_FETCH) && !defined(UNUSUAL)
#define barrier_data_dependency() __sync_synchronize()
#elif defined(HAVE_GCC_ASSEMBLER) && !defined(UNUSUAL)
#define barrier_data_dependency() __asm__ volatile ("mb":::"memory")
#endif
#elif defined(__ADSPBLACKFIN__)
#define barrier_data_dependency() do { } while (0)
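/*
 * Usage sketch (illustrative, not part of the original header): the reader
 * side of a lockless publish/consume pattern that barrier_data_dependency is
 * meant for. A writer fully initializes a node and then stores a pointer to
 * it; the reader loads the pointer, issues the barrier, and only then
 * dereferences it. struct example_node and example_published are
 * hypothetical.
 *
 *	struct example_node { int value; };
 *	extern struct example_node *thread_volatile example_published;
 *
 *	static int example_read(void)
 *	{
 *		struct example_node *p = example_published;
 *		if (!p)
 *			return 0;
 *		barrier_data_dependency();
 *		return p->value;
 *	}
 */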
/*
 * The mutex_unlock/mutex_lock sequence serves as a memory write barrier.
 *
 * On powerpc, mutex_unlock/mutex_lock doesn't serve as a memory barrier for
 * threads that don't take the lock, so we must add a barrier explicitly.
 *
 * See smp_mb__after_unlock_lock() in Linux for explanation.
 */
#if defined(THREAD_NONE)
#define barrier_write_before_unlock_lock() do { } while (0)
#elif defined(__powerpc__)
#if defined(HAVE_C11_ATOMICS) && !defined(UNUSUAL)
#define barrier_write_before_unlock_lock() atomic_thread_fence(memory_order_seq_cst)
#elif defined(HAVE_SYNC_AND_FETCH) && !defined(UNUSUAL)
#define barrier_write_before_unlock_lock() __sync_synchronize()
#elif defined(HAVE_GCC_ASSEMBLER)
#define barrier_write_before_unlock_lock() __asm__ volatile("sync":::"memory")
#endif
#else
#define barrier_write_before_unlock_lock() do { } while (0)
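/*
 * Usage sketch (illustrative, not part of the original header; the exact
 * placement is an assumption based on the macro name): code that relies on an
 * unlock-then-lock pair acting as a write barrier for observers that do not
 * take the locks places barrier_write_before_unlock_lock before the unlock.
 * The names are hypothetical.
 *
 *	example_data = 1;		// write that must become visible first
 *	barrier_write_before_unlock_lock();
 *	mutex_unlock(&example_lock_1);
 *	mutex_lock(&example_lock_2);
 *	example_flag = 1;		// write made after the unlock/lock pair
 *
 * A thread that observes example_flag == 1 without taking either lock can
 * then rely on example_data == 1 also being visible (provided it orders its
 * own reads appropriately).
 */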
/*
 * A write barrier before taking a lock; it makes sure that previous writes
 * are not reordered with the contents of the following locked region.
 */
#if defined(THREAD_NONE) || defined(THREAD_OS2) || (defined(ARCH_X86) && !defined(UNUSUAL))
#define barrier_write_before_lock() do { } while (0)
#elif defined(HAVE_C11_ATOMICS) && !defined(UNUSUAL)
#define barrier_write_before_lock() atomic_thread_fence(memory_order_seq_cst)
#elif defined(HAVE_SYNC_AND_FETCH) && !defined(UNUSUAL)
#define barrier_write_before_lock() __sync_synchronize()
#elif defined(THREAD_WIN32)
#if (defined(MemoryBarrier) || defined(__buildmemorybarrier)) && !defined(UNUSUAL_THREAD)
#define barrier_write_before_lock() MemoryBarrier()
#else
#define barrier_write_before_lock() \
do { \
	LONG volatile x; \
	InterlockedExchange(&x, 0); \
} while (0)
#endif
#else
void barrier_write_before_lock(void);
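/*
 * Usage sketch (illustrative, not part of the original header):
 * barrier_write_before_lock is placed between plain stores and a subsequent
 * mutex_lock when another thread may examine the stored data without taking
 * the lock. The names are hypothetical.
 *
 *	example_request = 1;		// plain store, read by others without the lock
 *	barrier_write_before_lock();	// keep the store ahead of the locked section
 *	mutex_lock(&example_lock);
 *	// ... locked work that assumes example_request is already visible ...
 *	mutex_unlock(&example_lock);
 */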
#if !defined(THREAD_NONE) && defined(DEBUG_TRACE)
void thread_set_id(int id);
int thread_get_id(void);
#else
#define thread_set_id(id) do { } while (0)
#define thread_get_id() 0
#endif
bool thread_enable_debugging_option(const char *option, size_t l);

void thread_init(void);
void thread_done(void);