/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdarg.h>
#include <stdlib.h>
#include <string.h>
#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
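// Editorial note (illustrative, not from the original source): the KMP_FTN_*
// constants select how Fortran entry points are mangled. For a routine named
// omp_get_thread_num, the schemes correspond to:
//   KMP_FTN_PLAIN  -> omp_get_thread_num
//   KMP_FTN_APPEND -> omp_get_thread_num_
//   KMP_FTN_UPPER  -> OMP_GET_THREAD_NUM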
#define KMP_PTR_SKIP (sizeof(void *))
/* -------------------------- Compiler variations ------------------------ */

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#define KMP_COMPILER_ICX 0

#if __INTEL_CLANG_COMPILER
#undef KMP_COMPILER_ICX
#define KMP_COMPILER_ICX 1
#elif defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif
#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD || KMP_OS_NETBSD || \
     KMP_OS_DRAGONFLY || KMP_OS_AIX) && \
    !KMP_OS_WASI && !KMP_OS_EMSCRIPTEN
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

#if (KMP_OS_LINUX || (KMP_OS_FREEBSD && __FreeBSD_version >= 1301000))
#define KMP_HAVE_SCHED_GETCPU 1
#else
#define KMP_HAVE_SCHED_GETCPU 0
#endif
/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad until
   NetBSD 10.0 which ships with GCC 10.5 */
#if (!KMP_OS_NETBSD || __GNUC__ >= 10)
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
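// Editorial sketch (not part of the original header): code that needs the
// 128-bit type is expected to guard its use on KMP_HAVE_QUAD, e.g.:
//
//   #if KMP_HAVE_QUAD
//   _Quad q = 0.0; // _Quad is usable on this compiler/target
//   #endif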
#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#if KMP_MSVC_COMPAT
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */
#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */
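// Editorial sketch (not part of the original header): the *_SPEC strings are
// meant to be pasted into printf-style formats, e.g.:
//
//   kmp_int64 n = 42;
//   printf("n = %" KMP_INT64_SPEC "\n", n); // "%lld" on Unix, "%I64d" on Win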
#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS || KMP_ARCH_WASM || \
    KMP_ARCH_PPC || KMP_ARCH_AARCH64_32
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 || \
    KMP_ARCH_VE || KMP_ARCH_S390X
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_WASM || KMP_ARCH_PPC
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif
typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif
#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64 || KMP_ARCH_WASM) && \
    (KMP_OS_FREEBSD || KMP_OS_LINUX || KMP_OS_WASI)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
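// Editorial sketch (not part of the original header): kmp_va_deref and
// kmp_va_addr_of hide whether a va_list is passed directly or by address;
// the helper name below is hypothetical:
//
//   void example(kmp_va_list ap) {
//     int gtid = va_arg(kmp_va_deref(ap), int);
//   }
//   // call site: va_list ap; va_start(ap, last); example(kmp_va_addr_of(ap));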
#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus
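// Editorial sketch (not part of the original header): traits_t lets template
// code build a printf format for an arbitrary integer type at run time:
//
//   template <typename T> void debug_dump(T value) {
//     char fmt[16];
//     snprintf(fmt, sizeof(fmt), "%%%s\n", traits_t<T>::spec); // e.g. "%lld\n"
//     printf(fmt, value);
//   }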
#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif
/* Check if the OS/arch can support user-level mwait */
// All mwait code tests for UMWAIT first, so it should only fall back to ring3
// MWAIT if a machine supports MWAIT but not UMWAIT
#define KMP_HAVE_MWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC)
#if KMP_OS_WINDOWS
// Don't include everything related to NT status code, we'll do that explicitly
#define WIN32_NO_STATUS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif
#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
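// Editorial sketch (not part of the original header): PAGE_ALIGNED tests an
// address, ALIGN_TO_PAGE rounds one down to its page boundary:
//
//   void *addr = some_pointer;            // hypothetical pointer
//   void *page = ALIGN_TO_PAGE(addr);     // start of the enclosing page
//   assert(PAGE_ALIGNED(page));           // always true after rounding down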
/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
#include <new>
#endif // __cplusplus
#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */
#define KMP_CACHE_PREFETCH(ADDR) /* nothing */
// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
// Code from libcxx/include/__config
// Use a function like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
// icc cannot properly tell this attribute is absent so force off
#elif KMP_COMPILER_ICC
#define KMP_FALLTHROUGH() ((void)0)
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif
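// Editorial sketch (not part of the original header): KMP_FALLTHROUGH() is
// written at the end of a case body that deliberately falls through; the
// functions named below are hypothetical:
//
//   switch (step) {
//   case 0:
//     init();
//     KMP_FALLTHROUGH();
//   case 1:
//     run();
//     break;
//   }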
#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
#endif

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif
#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif
// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str) \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, \
                                 default_ver) \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name)))); \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR( \
      apic_name) "@@" default_ver "\n\t"); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t")

#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
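// Editorial sketch (not part of the original header): a definition site first
// defines the __kmp_api_-prefixed function via KMP_EXPAND_NAME, then binds
// versioned ELF aliases to it (the names below are hypothetical):
//
//   int KMP_EXPAND_NAME(omp_example)(void) { return 0; }
//   KMP_VERSION_SYMBOL(omp_example, 10, "OMP_1.0");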
/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
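// Editorial sketch (not part of the original header): KMP_ALIGN_CACHE pads a
// field (or type) to a cache-line boundary to avoid false sharing:
//
//   typedef struct example_counter {
//     KMP_ALIGN_CACHE volatile kmp_int32 count; // starts on its own line
//   } example_counter_t;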
/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};
// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS && \
    !((KMP_ARCH_AARCH64 || KMP_ARCH_ARM) && \
      (KMP_COMPILER_CLANG || KMP_COMPILER_GCC))

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif
// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
// #define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
// #define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
// #define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
// #define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store_rel16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))
// KMP_COMPARE_AND_STORE expects this order:       pointer, compare, exchange
// _InterlockedCompareExchange expects this order: pointer, exchange, compare
// KMP_COMPARE_AND_STORE also returns a bool indicating a successful write. A
// write is successful if the return value of _InterlockedCompareExchange is
// the same as the compare value.
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}
// The _RET versions return the value instead of a bool

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  _InterlockedCompareExchange8((p), (sv), (cv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  _InterlockedCompareExchange16((p), (sv), (cv))

#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv), \
                                (kmp_int64)(cv))

#define KMP_XCHG_FIXED8(p, v) \
  _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) _InterlockedExchange16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      _InterlockedExchange64((volatile kmp_int64 *)p, *(kmp_int64 *)&v);
  return *(kmp_real64 *)&tmp;
}

#else // !KMP_ARCH_AARCH64
// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif // KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v) \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif
#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v) \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v) \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                  kmp_uint64 cv,
                                                  kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), \
                                 (kmp_uint64)(cv), (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
#endif
#if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
#define KMP_XCHG_FIXED8(p, v) \
  __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v), \
                      __ATOMIC_SEQ_CST)
#else
#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#endif
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  volatile kmp_uint32 *up;
  kmp_uint32 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int32 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real32 ftmp;
  memcpy(&ftmp, &tmp, sizeof(tmp));
  return ftmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  volatile kmp_uint64 *up;
  kmp_uint64 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int64 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real64 dtmp;
  memcpy(&dtmp, &tmp, sizeof(tmp));
  return dtmp;
}

#else
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
#endif /* KMP_ASM_INTRINS */
/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */
#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 || \
    KMP_ARCH_VE || KMP_ARCH_S390X || KMP_ARCH_PPC || KMP_ARCH_AARCH64_32
#if KMP_OS_WINDOWS
#undef KMP_MB
#define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
#else /* !KMP_OS_WINDOWS */
#define KMP_MB() __sync_synchronize()
#endif
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_MIC
// fence-style instructions do not exist, but lock; xaddl $0,(%rsp) can be used.
// We shouldn't need it, though, since the ABI rules require that
// * If the compiler generates NGO stores it also generates the fence
// * If users hand-code NGO stores they should insert the fence
// therefore no incomplete unordered stores should be visible.
#define KMP_MFENCE() /* Nothing */
#define KMP_SFENCE() /* Nothing */
#else
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#define KMP_MFENCE_() _mm_mfence()
#define KMP_SFENCE_() _mm_sfence()
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_() MemoryBarrier()
#define KMP_SFENCE_() MemoryBarrier()
#else
#define KMP_MFENCE_() __sync_synchronize()
#define KMP_SFENCE_() __sync_synchronize()
#endif
#define KMP_MFENCE() \
  if (UNLIKELY(!__kmp_cpuinfo.initialized)) { \
    __kmp_query_cpuid(&__kmp_cpuinfo); \
  } \
  if (__kmp_cpuinfo.flags.sse2) { \
    KMP_MFENCE_(); \
  }
#define KMP_SFENCE() KMP_SFENCE_()
#endif
#else
#define KMP_MFENCE() KMP_MB()
#define KMP_SFENCE() KMP_MB()
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif
#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif
/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
//    #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
//    #define TCW_4(a, b) (a) = (*(volatile kmp_int32 *)&(b))
//
//    #define TCR_8(a) (*(volatile kmp_int64 *)(a))
//    #define TCW_8(a, b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.
#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
                              (kmp_int64)(b), (kmp_int64)(c))
#if KMP_ARCH_X86 || KMP_ARCH_MIPS || KMP_ARCH_WASM || KMP_ARCH_PPC
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */
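// Editorial sketch (not part of the original header): the TCR/TCW
// ("thread-consistent read/write") macros wrap shared loads and stores;
// TCW_PTR publishes a pointer and TCR_PTR reads it back (the variable below
// is hypothetical):
//
//   void *shared_slot;
//   TCW_PTR(shared_slot, new_value);
//   void *seen = TCR_PTR(shared_slot);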
/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif
typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif
#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4
/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif
// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual studio can't handle the asm sections in this code
#define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif
// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#include "kmp_safe_c_api.h"
// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
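// Editorial sketch (not part of the original header): these macros wrap
// std::atomic members with an explicit memory order, e.g.:
//
//   std::atomic<kmp_int32> ticket{0};
//   KMP_ATOMIC_ST_REL(&ticket, 1);              // store, release order
//   kmp_int32 old = KMP_ATOMIC_INC(&ticket);    // fetch_add(1, acq_rel)
//   kmp_int32 cur = KMP_ATOMIC_LD_ACQ(&ticket); // load, acquire order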
// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
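// Editorial sketch (not part of the original header): unlike raw
// compare_exchange_strong, these wrappers take "expected" by value, so a
// failed exchange cannot be observed through it:
//
//   std::atomic<kmp_int32> flag{0};
//   if (__kmp_atomic_compare_store(&flag, 0, 1)) {
//     // this thread won the 0 -> 1 transition
//   }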
// Symbol lookup on Linux/Windows
#if KMP_OS_WINDOWS
extern void *__kmp_lookup_symbol(const char *name, bool next = false);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) __kmp_lookup_symbol(name, true)
#elif KMP_OS_WASI || KMP_OS_EMSCRIPTEN
#define KMP_DLSYM(name) nullptr
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif
// MSVC doesn't have this, but clang/clang-cl does.
#ifndef __has_builtin
#define __has_builtin(x) 0
#endif

// Same as LLVM_BUILTIN_UNREACHABLE. States that it is UB to reach this point.
#if __has_builtin(__builtin_unreachable) || defined(__GNUC__)
#define KMP_BUILTIN_UNREACHABLE __builtin_unreachable()
#elif defined(_MSC_VER)
#define KMP_BUILTIN_UNREACHABLE __assume(false)
#else
#define KMP_BUILTIN_UNREACHABLE
#endif
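// Editorial sketch (not part of the original header): typical use is after a
// switch that handles every possible value (names below are hypothetical):
//
//   switch (kind) {
//   case 0:
//     return a;
//   case 1:
//     return b;
//   }
//   KMP_BUILTIN_UNREACHABLE; // reaching here would be undefined behavior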
#endif /* KMP_OS_H */