//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
#define _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H

#include <__atomic/memory_order.h>
#include <__atomic/to_gcc_order.h>
#include <__config>
#include <__memory/addressof.h>
#include <__type_traits/is_assignable.h>
#include <__type_traits/is_trivially_copyable.h>
#include <__type_traits/remove_const.h>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}

template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to         = reinterpret_cast<volatile char*>(std::addressof(__a_value));
  volatile char* __end        = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(std::addressof(__val));
  while (__to != __end)
    *__to++ = *__from++;
}
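
// Note: the byte-wise loop above is what the volatile overload of
// __cxx_atomic_init below relies on; a call such as
//   __cxx_atomic_assign_volatile(__a->__a_value, __val);
// copies the representation of _Tp one volatile char at a time.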

template <typename _Tp>
struct __cxx_atomic_base_impl {
  _LIBCPP_HIDE_FROM_ABI
#  ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#  else
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {
  }
#  endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT : __a_value(value) {}
  _Tp __a_value;
};
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
  __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
  __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(const volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __dst, memory_order __order) {
  __atomic_load(std::addressof(__a->__a_value), __dst, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(const __cxx_atomic_base_impl<_Tp>* __a, _Tp* __dst, memory_order __order) {
  __atomic_load(std::addressof(__a->__a_value), __dst, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(
      std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(
      std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a,
    _Tp* __expected,
    _Tp __value,
    memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      false,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      false,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a,
    _Tp* __expected,
    _Tp __value,
    memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      true,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      true,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
struct __skip_amt {
  enum { value = 1 };
};

template <typename _Tp>
struct __skip_amt<_Tp*> {
  enum { value = sizeof(_Tp) };
};

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> {};
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> {};
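
// Note: the GCC __atomic_fetch_add/__atomic_fetch_sub builtins treat the
// addend for a pointer-typed atomic as a raw byte offset, so the fetch_add and
// fetch_sub helpers below scale __delta by __skip_amt<_Tp>::value (the pointee
// size for _Tp*, 1 otherwise) to obtain ordinary pointer arithmetic.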

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}
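
// Note: passing 0 as the second argument of __atomic_is_lock_free asks the
// compiler to answer for an object of size __s at a typically aligned address
// rather than for one specific object.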
#  define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)

#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)

template <typename _Tp>
struct __cxx_atomic_base_impl {
  _LIBCPP_HIDE_FROM_ABI
#  ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#  else
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {
  }
#  endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT : __a_value(__value) {}
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};

#  define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)

_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(std::addressof(__a->__a_value), __val);
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(std::addressof(__a->__a_value), __val);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
  return __c11_atomic_load(
      const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
  return __c11_atomic_load(
      const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(__cxx_atomic_base_impl<_Tp> const volatile* __a, _Tp* __dst, memory_order __order) _NOEXCEPT {
  using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
  *__dst           = __c11_atomic_load(
      const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(__cxx_atomic_base_impl<_Tp> const* __a, _Tp* __dst, memory_order __order) _NOEXCEPT {
  using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
  *__dst           = __c11_atomic_load(
      const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(
      std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(
      std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_release
           ? memory_order_relaxed
           : (__order == memory_order_acq_rel ? memory_order_acquire : __order);
}
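
// Note: a failed compare-exchange performs no store, and the C/C++ memory
// model does not allow memory_order_release or memory_order_acq_rel as failure
// orderings, so the mapping above demotes them to relaxed and acquire before
// the value reaches the __c11_* builtins.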

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp> volatile* __a,
    _Tp* __expected,
    _Tp __value,
    memory_order __success,
    memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure)
    _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp> volatile* __a,
    _Tp* __expected,
    _Tp __value,
    memory_order __success,
    memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure)
    _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp*
__cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp*
__cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp*
__cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp*
__cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}

#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP

template <typename _Tp, typename _Base = __cxx_atomic_base_impl<_Tp> >
struct __cxx_atomic_impl : public _Base {
  static_assert(is_trivially_copyable<_Tp>::value, "std::atomic<T> requires that 'T' be a trivially copyable type");

  _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT : _Base(__value) {}
};
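
// Illustrative sketch (an assumption about the callers, not part of this
// header): the <atomic> front end is expected to hold a __cxx_atomic_impl<_Tp>
// member and forward to the free functions defined above, roughly
//   __cxx_atomic_impl<int> __a_;
//   __cxx_atomic_init(std::addressof(__a_), 0);
//   __cxx_atomic_store(std::addressof(__a_), 1, memory_order_release);
//   int __observed = __cxx_atomic_load(std::addressof(__a_), memory_order_acquire);
// with the GCC-builtin or C11-builtin backend selected at preprocessing time.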

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H