//===-- tsan_interface_atomic.h ---------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Public interface header for TSan atomics.
//===----------------------------------------------------------------------===//
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

typedef char __tsan_atomic8;
typedef short __tsan_atomic16;
typedef int __tsan_atomic32;
typedef long __tsan_atomic64;
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
__extension__ typedef __int128 __tsan_atomic128;
#define __TSAN_HAS_INT128 1
#else
#define __TSAN_HAS_INT128 0
#endif

// Part of ABI, do not change.
// https://github.com/llvm/llvm-project/blob/main/libcxx/include/atomic
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst
} __tsan_memory_order;
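
// The functions below mirror the C/C++11 atomic builtins; code built with TSan
// calls into this interface so the race detector can model the synchronization.
// Illustrative sketch (not part of this header): a release store paired with an
// acquire load.
//
//   __tsan_atomic32 flag = 0;
//   __tsan_atomic32_store(&flag, 1, __tsan_memory_order_release);
//   ...
//   if (__tsan_atomic32_load(&flag, __tsan_memory_order_acquire)) { /* ... */ }

// Atomic load of *a with the given memory order.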
__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    __tsan_memory_order mo);
#endif
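
// Atomic store of v into *a with the given memory order.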
void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
    __tsan_memory_order mo);
#endif
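
// Atomic exchange: atomically replaces *a with v and returns the previous
// value of *a.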
__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
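
// Atomic fetch-and-add: atomically performs *a += v and returns the value of
// *a before the addition.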
__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
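
// Atomic fetch-and-subtract: atomically performs *a -= v and returns the
// previous value.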
__tsan_atomic8 __tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_sub(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_sub(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_sub(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_sub(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
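
// Atomic fetch-and-AND: atomically performs *a &= v and returns the previous
// value.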
__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
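
// Atomic fetch-and-OR: atomically performs *a |= v and returns the previous
// value.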
__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
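
// Atomic fetch-and-XOR: atomically performs *a ^= v and returns the previous
// value.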
__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
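
// Atomic fetch-and-NAND: atomically replaces *a with ~(*a & v) and returns the
// previous value (matching the GCC/Clang __atomic_fetch_nand builtins).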
__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
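
// Weak compare-and-exchange: if *a equals *c, atomically stores v into *a and
// returns nonzero; otherwise returns zero, and the value observed in *a is
// typically written back to *c (mirroring the C11 builtins). The weak form may
// fail spuriously.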
int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif
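
// Strong compare-and-exchange: same as the weak form, but does not fail
// spuriously.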
int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif
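
// Value-returning compare-and-exchange: if *a equals c, atomically stores v
// into *a; in either case returns the value *a held before the call.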
__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#endif
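
// Memory ordering fences, analogous to atomic_thread_fence and
// atomic_signal_fence.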
void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TSAN_INTERFACE_ATOMIC_H