1 //===----------------------------------------------------------------------===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 #ifndef _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
10 #define _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
#include <__atomic/memory_order.h>
#include <__atomic/to_gcc_order.h>
#include <__config>
#include <__cstddef/ptrdiff_t.h>
#include <__memory/addressof.h>
#include <__type_traits/enable_if.h>
#include <__type_traits/is_assignable.h>
#include <__type_traits/is_trivially_copyable.h>
#include <__type_traits/remove_const.h>
22 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
23 # pragma GCC system_header
26 _LIBCPP_BEGIN_NAMESPACE_STD
28 #if _LIBCPP_HAS_GCC_ATOMIC_IMP
30 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
31 // the default operator= in an object is not volatile, a byte-by-byte copy
33 template <typename _Tp
, typename _Tv
, __enable_if_t
<is_assignable
<_Tp
&, _Tv
>::value
, int> = 0>
34 _LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_assign_volatile(_Tp
& __a_value
, _Tv
const& __val
) {
37 template <typename _Tp
, typename _Tv
, __enable_if_t
<is_assignable
<_Tp
&, _Tv
>::value
, int> = 0>
38 _LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_assign_volatile(_Tp
volatile& __a_value
, _Tv
volatile const& __val
) {
39 volatile char* __to
= reinterpret_cast<volatile char*>(std::addressof(__a_value
));
40 volatile char* __end
= __to
+ sizeof(_Tp
);
41 volatile const char* __from
= reinterpret_cast<volatile const char*>(std::addressof(__val
));
46 template <typename _Tp
>
47 struct __cxx_atomic_base_impl
{
49 # ifndef _LIBCPP_CXX03_LANG
50 __cxx_atomic_base_impl() _NOEXCEPT
= default;
52 __cxx_atomic_base_impl() _NOEXCEPT
: __a_value() {
54 # endif // _LIBCPP_CXX03_LANG
55 _LIBCPP_CONSTEXPR
explicit __cxx_atomic_base_impl(_Tp value
) _NOEXCEPT
: __a_value(value
) {}
59 template <typename _Tp
>
60 _LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __val
) {
61 __cxx_atomic_assign_volatile(__a
->__a_value
, __val
);
64 template <typename _Tp
>
65 _LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __val
) {
66 __a
->__a_value
= __val
;
69 _LIBCPP_HIDE_FROM_ABI
inline void __cxx_atomic_thread_fence(memory_order __order
) {
70 __atomic_thread_fence(__to_gcc_order(__order
));
73 _LIBCPP_HIDE_FROM_ABI
inline void __cxx_atomic_signal_fence(memory_order __order
) {
74 __atomic_signal_fence(__to_gcc_order(__order
));
77 template <typename _Tp
>
78 _LIBCPP_HIDE_FROM_ABI
void
79 __cxx_atomic_store(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __val
, memory_order __order
) {
80 __atomic_store(std::addressof(__a
->__a_value
), std::addressof(__val
), __to_gcc_order(__order
));
83 template <typename _Tp
>
84 _LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_store(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __val
, memory_order __order
) {
85 __atomic_store(std::addressof(__a
->__a_value
), std::addressof(__val
), __to_gcc_order(__order
));
88 template <typename _Tp
>
89 _LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_load(const volatile __cxx_atomic_base_impl
<_Tp
>* __a
, memory_order __order
) {
91 __atomic_load(std::addressof(__a
->__a_value
), std::addressof(__ret
), __to_gcc_order(__order
));
95 template <typename _Tp
>
96 _LIBCPP_HIDE_FROM_ABI
void
97 __cxx_atomic_load_inplace(const volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp
* __dst
, memory_order __order
) {
98 __atomic_load(std::addressof(__a
->__a_value
), __dst
, __to_gcc_order(__order
));
101 template <typename _Tp
>
102 _LIBCPP_HIDE_FROM_ABI
void
103 __cxx_atomic_load_inplace(const __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp
* __dst
, memory_order __order
) {
104 __atomic_load(std::addressof(__a
->__a_value
), __dst
, __to_gcc_order(__order
));
107 template <typename _Tp
>
108 _LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_load(const __cxx_atomic_base_impl
<_Tp
>* __a
, memory_order __order
) {
110 __atomic_load(std::addressof(__a
->__a_value
), std::addressof(__ret
), __to_gcc_order(__order
));
114 template <typename _Tp
>
115 _LIBCPP_HIDE_FROM_ABI _Tp
116 __cxx_atomic_exchange(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __value
, memory_order __order
) {
119 std::addressof(__a
->__a_value
), std::addressof(__value
), std::addressof(__ret
), __to_gcc_order(__order
));
123 template <typename _Tp
>
124 _LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_exchange(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __value
, memory_order __order
) {
127 std::addressof(__a
->__a_value
), std::addressof(__value
), std::addressof(__ret
), __to_gcc_order(__order
));
131 template <typename _Tp
>
132 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(
133 volatile __cxx_atomic_base_impl
<_Tp
>* __a
,
136 memory_order __success
,
137 memory_order __failure
) {
138 return __atomic_compare_exchange(
139 std::addressof(__a
->__a_value
),
141 std::addressof(__value
),
143 __to_gcc_order(__success
),
144 __to_gcc_failure_order(__failure
));
147 template <typename _Tp
>
148 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(
149 __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp
* __expected
, _Tp __value
, memory_order __success
, memory_order __failure
) {
150 return __atomic_compare_exchange(
151 std::addressof(__a
->__a_value
),
153 std::addressof(__value
),
155 __to_gcc_order(__success
),
156 __to_gcc_failure_order(__failure
));
159 template <typename _Tp
>
160 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(
161 volatile __cxx_atomic_base_impl
<_Tp
>* __a
,
164 memory_order __success
,
165 memory_order __failure
) {
166 return __atomic_compare_exchange(
167 std::addressof(__a
->__a_value
),
169 std::addressof(__value
),
171 __to_gcc_order(__success
),
172 __to_gcc_failure_order(__failure
));
175 template <typename _Tp
>
176 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(
177 __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp
* __expected
, _Tp __value
, memory_order __success
, memory_order __failure
) {
178 return __atomic_compare_exchange(
179 std::addressof(__a
->__a_value
),
181 std::addressof(__value
),
183 __to_gcc_order(__success
),
184 __to_gcc_failure_order(__failure
));
187 template <typename _Tp
>
192 template <typename _Tp
>
193 struct __skip_amt
<_Tp
*> {
194 enum { value
= sizeof(_Tp
) };
197 // FIXME: Haven't figured out what the spec says about using arrays with
198 // atomic_fetch_add. Force a failure rather than creating bad behavior.
199 template <typename _Tp
>
200 struct __skip_amt
<_Tp
[]> {};
201 template <typename _Tp
, int n
>
202 struct __skip_amt
<_Tp
[n
]> {};
204 template <typename _Tp
, typename _Td
>
205 _LIBCPP_HIDE_FROM_ABI _Tp
206 __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Td __delta
, memory_order __order
) {
207 return __atomic_fetch_add(std::addressof(__a
->__a_value
), __delta
* __skip_amt
<_Tp
>::value
, __to_gcc_order(__order
));
210 template <typename _Tp
, typename _Td
>
211 _LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_add(__cxx_atomic_base_impl
<_Tp
>* __a
, _Td __delta
, memory_order __order
) {
212 return __atomic_fetch_add(std::addressof(__a
->__a_value
), __delta
* __skip_amt
<_Tp
>::value
, __to_gcc_order(__order
));
215 template <typename _Tp
, typename _Td
>
216 _LIBCPP_HIDE_FROM_ABI _Tp
217 __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Td __delta
, memory_order __order
) {
218 return __atomic_fetch_sub(std::addressof(__a
->__a_value
), __delta
* __skip_amt
<_Tp
>::value
, __to_gcc_order(__order
));
221 template <typename _Tp
, typename _Td
>
222 _LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_sub(__cxx_atomic_base_impl
<_Tp
>* __a
, _Td __delta
, memory_order __order
) {
223 return __atomic_fetch_sub(std::addressof(__a
->__a_value
), __delta
* __skip_amt
<_Tp
>::value
, __to_gcc_order(__order
));
226 template <typename _Tp
>
227 _LIBCPP_HIDE_FROM_ABI _Tp
228 __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) {
229 return __atomic_fetch_and(std::addressof(__a
->__a_value
), __pattern
, __to_gcc_order(__order
));
232 template <typename _Tp
>
233 _LIBCPP_HIDE_FROM_ABI _Tp
234 __cxx_atomic_fetch_and(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) {
235 return __atomic_fetch_and(std::addressof(__a
->__a_value
), __pattern
, __to_gcc_order(__order
));
238 template <typename _Tp
>
239 _LIBCPP_HIDE_FROM_ABI _Tp
240 __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) {
241 return __atomic_fetch_or(std::addressof(__a
->__a_value
), __pattern
, __to_gcc_order(__order
));
244 template <typename _Tp
>
245 _LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_or(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) {
246 return __atomic_fetch_or(std::addressof(__a
->__a_value
), __pattern
, __to_gcc_order(__order
));
249 template <typename _Tp
>
250 _LIBCPP_HIDE_FROM_ABI _Tp
251 __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) {
252 return __atomic_fetch_xor(std::addressof(__a
->__a_value
), __pattern
, __to_gcc_order(__order
));
255 template <typename _Tp
>
256 _LIBCPP_HIDE_FROM_ABI _Tp
257 __cxx_atomic_fetch_xor(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) {
258 return __atomic_fetch_xor(std::addressof(__a
->__a_value
), __pattern
, __to_gcc_order(__order
));
261 # define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
263 #elif _LIBCPP_HAS_C_ATOMIC_IMP
265 template <typename _Tp
>
266 struct __cxx_atomic_base_impl
{
267 _LIBCPP_HIDE_FROM_ABI
268 # ifndef _LIBCPP_CXX03_LANG
269 __cxx_atomic_base_impl() _NOEXCEPT
= default;
271 __cxx_atomic_base_impl() _NOEXCEPT
: __a_value() {
273 # endif // _LIBCPP_CXX03_LANG
274 _LIBCPP_CONSTEXPR
explicit __cxx_atomic_base_impl(_Tp __value
) _NOEXCEPT
: __a_value(__value
) {}
275 _LIBCPP_DISABLE_EXTENSION_WARNING
_Atomic(_Tp
) __a_value
;
278 # define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
280 _LIBCPP_HIDE_FROM_ABI
inline void __cxx_atomic_thread_fence(memory_order __order
) _NOEXCEPT
{
281 __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t
>(__order
));
284 _LIBCPP_HIDE_FROM_ABI
inline void __cxx_atomic_signal_fence(memory_order __order
) _NOEXCEPT
{
285 __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t
>(__order
));
289 _LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __val
) _NOEXCEPT
{
290 __c11_atomic_init(std::addressof(__a
->__a_value
), __val
);
293 _LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __val
) _NOEXCEPT
{
294 __c11_atomic_init(std::addressof(__a
->__a_value
), __val
);
298 _LIBCPP_HIDE_FROM_ABI
void
299 __cxx_atomic_store(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __val
, memory_order __order
) _NOEXCEPT
{
300 __c11_atomic_store(std::addressof(__a
->__a_value
), __val
, static_cast<__memory_order_underlying_t
>(__order
));
303 _LIBCPP_HIDE_FROM_ABI
void
304 __cxx_atomic_store(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __val
, memory_order __order
) _NOEXCEPT
{
305 __c11_atomic_store(std::addressof(__a
->__a_value
), __val
, static_cast<__memory_order_underlying_t
>(__order
));
309 _LIBCPP_HIDE_FROM_ABI _Tp
310 __cxx_atomic_load(__cxx_atomic_base_impl
<_Tp
> const volatile* __a
, memory_order __order
) _NOEXCEPT
{
311 using __ptr_type
= __remove_const_t
<decltype(__a
->__a_value
)>*;
312 return __c11_atomic_load(
313 const_cast<__ptr_type
>(std::addressof(__a
->__a_value
)), static_cast<__memory_order_underlying_t
>(__order
));
316 _LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_load(__cxx_atomic_base_impl
<_Tp
> const* __a
, memory_order __order
) _NOEXCEPT
{
317 using __ptr_type
= __remove_const_t
<decltype(__a
->__a_value
)>*;
318 return __c11_atomic_load(
319 const_cast<__ptr_type
>(std::addressof(__a
->__a_value
)), static_cast<__memory_order_underlying_t
>(__order
));
323 _LIBCPP_HIDE_FROM_ABI
void
324 __cxx_atomic_load_inplace(__cxx_atomic_base_impl
<_Tp
> const volatile* __a
, _Tp
* __dst
, memory_order __order
) _NOEXCEPT
{
325 using __ptr_type
= __remove_const_t
<decltype(__a
->__a_value
)>*;
326 *__dst
= __c11_atomic_load(
327 const_cast<__ptr_type
>(std::addressof(__a
->__a_value
)), static_cast<__memory_order_underlying_t
>(__order
));
330 _LIBCPP_HIDE_FROM_ABI
void
331 __cxx_atomic_load_inplace(__cxx_atomic_base_impl
<_Tp
> const* __a
, _Tp
* __dst
, memory_order __order
) _NOEXCEPT
{
332 using __ptr_type
= __remove_const_t
<decltype(__a
->__a_value
)>*;
333 *__dst
= __c11_atomic_load(
334 const_cast<__ptr_type
>(std::addressof(__a
->__a_value
)), static_cast<__memory_order_underlying_t
>(__order
));
338 _LIBCPP_HIDE_FROM_ABI _Tp
339 __cxx_atomic_exchange(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __value
, memory_order __order
) _NOEXCEPT
{
340 return __c11_atomic_exchange(
341 std::addressof(__a
->__a_value
), __value
, static_cast<__memory_order_underlying_t
>(__order
));
344 _LIBCPP_HIDE_FROM_ABI _Tp
345 __cxx_atomic_exchange(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __value
, memory_order __order
) _NOEXCEPT
{
346 return __c11_atomic_exchange(
347 std::addressof(__a
->__a_value
), __value
, static_cast<__memory_order_underlying_t
>(__order
));
350 _LIBCPP_HIDE_FROM_ABI
inline _LIBCPP_CONSTEXPR memory_order
__to_failure_order(memory_order __order
) {
351 // Avoid switch statement to make this a constexpr.
352 return __order
== memory_order_release
353 ? memory_order_relaxed
354 : (__order
== memory_order_acq_rel
? memory_order_acquire
: __order
);
358 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(
359 __cxx_atomic_base_impl
<_Tp
> volatile* __a
,
362 memory_order __success
,
363 memory_order __failure
) _NOEXCEPT
{
364 return __c11_atomic_compare_exchange_strong(
365 std::addressof(__a
->__a_value
),
368 static_cast<__memory_order_underlying_t
>(__success
),
369 static_cast<__memory_order_underlying_t
>(__to_failure_order(__failure
)));
372 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(
373 __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp
* __expected
, _Tp __value
, memory_order __success
, memory_order __failure
)
375 return __c11_atomic_compare_exchange_strong(
376 std::addressof(__a
->__a_value
),
379 static_cast<__memory_order_underlying_t
>(__success
),
380 static_cast<__memory_order_underlying_t
>(__to_failure_order(__failure
)));
384 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(
385 __cxx_atomic_base_impl
<_Tp
> volatile* __a
,
388 memory_order __success
,
389 memory_order __failure
) _NOEXCEPT
{
390 return __c11_atomic_compare_exchange_weak(
391 std::addressof(__a
->__a_value
),
394 static_cast<__memory_order_underlying_t
>(__success
),
395 static_cast<__memory_order_underlying_t
>(__to_failure_order(__failure
)));
398 _LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(
399 __cxx_atomic_base_impl
<_Tp
>* __a
, _Tp
* __expected
, _Tp __value
, memory_order __success
, memory_order __failure
)
401 return __c11_atomic_compare_exchange_weak(
402 std::addressof(__a
->__a_value
),
405 static_cast<__memory_order_underlying_t
>(__success
),
406 static_cast<__memory_order_underlying_t
>(__to_failure_order(__failure
)));
410 _LIBCPP_HIDE_FROM_ABI _Tp
411 __cxx_atomic_fetch_add(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __delta
, memory_order __order
) _NOEXCEPT
{
412 return __c11_atomic_fetch_add(
413 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
416 _LIBCPP_HIDE_FROM_ABI _Tp
417 __cxx_atomic_fetch_add(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __delta
, memory_order __order
) _NOEXCEPT
{
418 return __c11_atomic_fetch_add(
419 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
423 _LIBCPP_HIDE_FROM_ABI _Tp
*
424 __cxx_atomic_fetch_add(__cxx_atomic_base_impl
<_Tp
*> volatile* __a
, ptrdiff_t __delta
, memory_order __order
) _NOEXCEPT
{
425 return __c11_atomic_fetch_add(
426 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
429 _LIBCPP_HIDE_FROM_ABI _Tp
*
430 __cxx_atomic_fetch_add(__cxx_atomic_base_impl
<_Tp
*>* __a
, ptrdiff_t __delta
, memory_order __order
) _NOEXCEPT
{
431 return __c11_atomic_fetch_add(
432 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
436 _LIBCPP_HIDE_FROM_ABI _Tp
437 __cxx_atomic_fetch_sub(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __delta
, memory_order __order
) _NOEXCEPT
{
438 return __c11_atomic_fetch_sub(
439 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
442 _LIBCPP_HIDE_FROM_ABI _Tp
443 __cxx_atomic_fetch_sub(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __delta
, memory_order __order
) _NOEXCEPT
{
444 return __c11_atomic_fetch_sub(
445 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
448 _LIBCPP_HIDE_FROM_ABI _Tp
*
449 __cxx_atomic_fetch_sub(__cxx_atomic_base_impl
<_Tp
*> volatile* __a
, ptrdiff_t __delta
, memory_order __order
) _NOEXCEPT
{
450 return __c11_atomic_fetch_sub(
451 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
454 _LIBCPP_HIDE_FROM_ABI _Tp
*
455 __cxx_atomic_fetch_sub(__cxx_atomic_base_impl
<_Tp
*>* __a
, ptrdiff_t __delta
, memory_order __order
) _NOEXCEPT
{
456 return __c11_atomic_fetch_sub(
457 std::addressof(__a
->__a_value
), __delta
, static_cast<__memory_order_underlying_t
>(__order
));
461 _LIBCPP_HIDE_FROM_ABI _Tp
462 __cxx_atomic_fetch_and(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __pattern
, memory_order __order
) _NOEXCEPT
{
463 return __c11_atomic_fetch_and(
464 std::addressof(__a
->__a_value
), __pattern
, static_cast<__memory_order_underlying_t
>(__order
));
467 _LIBCPP_HIDE_FROM_ABI _Tp
468 __cxx_atomic_fetch_and(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) _NOEXCEPT
{
469 return __c11_atomic_fetch_and(
470 std::addressof(__a
->__a_value
), __pattern
, static_cast<__memory_order_underlying_t
>(__order
));
474 _LIBCPP_HIDE_FROM_ABI _Tp
475 __cxx_atomic_fetch_or(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __pattern
, memory_order __order
) _NOEXCEPT
{
476 return __c11_atomic_fetch_or(
477 std::addressof(__a
->__a_value
), __pattern
, static_cast<__memory_order_underlying_t
>(__order
));
480 _LIBCPP_HIDE_FROM_ABI _Tp
481 __cxx_atomic_fetch_or(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) _NOEXCEPT
{
482 return __c11_atomic_fetch_or(
483 std::addressof(__a
->__a_value
), __pattern
, static_cast<__memory_order_underlying_t
>(__order
));
487 _LIBCPP_HIDE_FROM_ABI _Tp
488 __cxx_atomic_fetch_xor(__cxx_atomic_base_impl
<_Tp
> volatile* __a
, _Tp __pattern
, memory_order __order
) _NOEXCEPT
{
489 return __c11_atomic_fetch_xor(
490 std::addressof(__a
->__a_value
), __pattern
, static_cast<__memory_order_underlying_t
>(__order
));
493 _LIBCPP_HIDE_FROM_ABI _Tp
494 __cxx_atomic_fetch_xor(__cxx_atomic_base_impl
<_Tp
>* __a
, _Tp __pattern
, memory_order __order
) _NOEXCEPT
{
495 return __c11_atomic_fetch_xor(
496 std::addressof(__a
->__a_value
), __pattern
, static_cast<__memory_order_underlying_t
>(__order
));
499 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
501 template <typename _Tp
, typename _Base
= __cxx_atomic_base_impl
<_Tp
> >
502 struct __cxx_atomic_impl
: public _Base
{
503 static_assert(is_trivially_copyable
<_Tp
>::value
, "std::atomic<T> requires that 'T' be a trivially copyable type");
505 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_impl() _NOEXCEPT
= default;
506 _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR
explicit __cxx_atomic_impl(_Tp __value
) _NOEXCEPT
: _Base(__value
) {}
509 _LIBCPP_END_NAMESPACE_STD
511 #endif // _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H