/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2025 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "gimple-fold.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"
#include "diagnostic-core.h"

/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size, bool volatilep)
{
  enum built_in_function fcode;
  int pos;

  if (size <= 1)
    pos = 0;
  else if (size <= 3)
    pos = 1;
  else if (size <= 7)
    pos = 2;
  else if (size <= 15)
    pos = 3;
  else
    pos = 4;

  if (param_tsan_distinguish_volatile && volatilep)
    fcode = is_write ? BUILT_IN_TSAN_VOLATILE_WRITE1
		     : BUILT_IN_TSAN_VOLATILE_READ1;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
		     : BUILT_IN_TSAN_READ1;
  fcode = (built_in_function) (fcode + pos);

  return builtin_decl_implicit (fcode);
}
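
/* For illustration (a sketch, not authoritative): a plain 4-byte
   non-volatile write yields BUILT_IN_TSAN_WRITE1 + 2, i.e. the libtsan
   entry point

     void __tsan_write4 (void *addr);

   while the same access on a volatile lvalue, with
   --param tsan-distinguish-volatile=1, would select
   __tsan_volatile_write4 instead.  Callers only pass power-of-two
   sizes up to 16; larger or odd sizes take the range path below.  */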

/* Check as to whether EXPR refers to a store to vptr.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
	  && DECL_VIRTUAL_P (field))
	return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
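
/* Example of what this catches (illustrative): the implicit vtable
   pointer store a C++ constructor emits,

     struct A { virtual void f (); };
     A::A () {}	  // stores &vtable into this->_vptr.A, a COMPONENT_REF
		  // whose FIELD_DECL is DECL_VIRTUAL_P

   which instrument_expr below routes to __tsan_vptr_update, so the
   runtime can special-case races on the vptr during construction and
   destruction.  */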

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  poly_int64 unused_bitsize, unused_bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
			      &mode, &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape,
     since they can't be visible to other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!may_be_aliased (base))
	return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (base))))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      HOST_WIDE_INT bitpos, bitsize;
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  expr = TREE_OPERAND (expr, 1);
	  if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
	    expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
	  if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_SIZE (expr)))
	    return false;
	  bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
		   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
	  bitsize = tree_to_uhwi (DECL_SIZE (expr));
	}
      else
	{
	  if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
	      || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
	    return false;
	  bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
	  bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
	}
      if (bitpos < 0 || bitsize <= 0)
	return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
	     / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
	return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
	return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
	{
	  align = (align - 1) & bitpos;
	  align = least_bit_hwi (align);
	}
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
		     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
	return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
	return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
					    ? BUILT_IN_TSAN_WRITE_RANGE
					    : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size,
						   TREE_THIS_VOLATILE (expr)),
			   1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table of how to map sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
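
/* Reading the table (illustrative): the entry
     CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD)
   means a call like

     __atomic_fetch_add (&x, 1, __ATOMIC_RELAXED);	// 4-byte x

   is redirected to the libtsan interceptor
   __tsan_atomic32_fetch_add (&x, 1, order), keeping the original
   memory-order argument, provided that argument is a valid constant
   memory model.  */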

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	if (fcode == BUILT_IN_ATOMIC_THREAD_FENCE)
	  warning_at (gimple_location (stmt), OPT_Wtsan,
		      "%qs is not supported with %qs", "atomic_thread_fence",
		      "-fsanitize=thread");

	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    maybe_clean_eh_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, NOP_EXPR, args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
		/* BIT_NOT_EXPR stands for NAND.  */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, BIT_AND_EXPR,
					     gimple_call_lhs (stmt), args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
		  }
		else
		  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
					   gimple_call_lhs (stmt), args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (tree_fits_uhwi_p (args[4])
		&& memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
	      return;
	    if (tree_fits_uhwi_p (args[5])
		&& memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
					 NOP_EXPR, args[1]);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		stmt = gsi_stmt (*gsi);
		tree t2 = make_ssa_name (TREE_TYPE (t));
		g = gimple_build_assign (t2, t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		tree cond = make_ssa_name (boolean_type_node);
		g = gimple_build_assign (cond, NE_EXPR,
					 t, build_zero_cst (TREE_TYPE (t)));
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		g = gimple_build_assign (lhs, COND_EXPR, cond, args[1], t2);
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_clear:
	  case bool_test_and_set:
	    if (BOOL_TYPE_SIZE != 8)
	      {
		decl = NULL_TREE;
		for (j = 1; j < 5; j++)
		  if (BOOL_TYPE_SIZE == (8 << j))
		    {
		      enum built_in_function tsan_fcode
			= (enum built_in_function)
			  (tsan_atomic_table[i].tsan_fcode + j);
		      decl = builtin_decl_implicit (tsan_fcode);
		      break;
		    }
		if (decl == NULL_TREE)
		  return;
	      }
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    if (tsan_atomic_table[i].action == bool_clear)
	      {
		update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				    build_int_cst (t, 0), last_arg);
		maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
		return;
	      }
	    t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				t, last_arg);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    lhs = gimple_call_lhs (stmt);
	    if (lhs == NULL_TREE)
	      return;
	    if (targetm.atomic_test_and_set_trueval != 1
		|| !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (t)))
	      {
		tree new_lhs = make_ssa_name (TREE_TYPE (t));
		gimple_call_set_lhs (stmt, new_lhs);
		if (targetm.atomic_test_and_set_trueval != 1)
		  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
					   build_int_cst (TREE_TYPE (t), 0));
		else
		  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		update_stmt (stmt);
	      }
	    return;
	  default:
	    continue;
	  }
      }
}

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* All functions that contain a call will have their exit
	 instrumented, therefore no tail calls other than
	 __tsan_func_exit may remain in the function.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented = instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}
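
/* In short (an illustrative summary): loads and stores in plain
   assignments go through instrument_expr, calls have their tail-call
   flag cleared (the exit instrumentation must run after them), and
   recognized atomic/sync builtins are rewritten by
   instrument_builtin_call; clobbers and the __tsan_init call itself
   are left alone.  */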

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	    {
	      if (fentry_exit_instrument)
		replace_func_exit (stmt);
	      else
		tsan_func_exits.safe_push (stmt);
	      func_exit_seen = true;
	    }
	  else
	    fentry_exit_instrument
	      |= (instrument_gimple (&gsi)
		  && param_tsan_instrument_func_entry_exit);
	}
      if (gimple_purge_dead_eh_edges (bb))
	*cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
	gsi = gsi_for_stmt (stmt);
	gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
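
/* Together with instrument_func_exit/replace_func_exit above, the pass
   brackets an instrumented function roughly like this (a sketch, not
   verbatim pass output):

     void f (void)
     {
       __tsan_func_entry (__builtin_return_address (0));
       ...body with instrumented accesses...
       __tsan_func_exit ();
       return;
     }

   which is what lets the runtime reconstruct stack traces for race
   reports.  */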

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}
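
/* The effect is roughly as if every TU compiled with -fsanitize=thread
   contained (a sketch; the constructor is built in the callgraph, not
   in source form, and its mangled name is made up here for
   illustration):

     static void _GLOBAL__I_tsan (void) { __tsan_init (); }

   registered as a static initializer just below the reserved priority
   range, so the runtime is initialized before user constructors run.  */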

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_tsan (m_ctxt); }
  bool gate (function *) final override
    {
      return sanitize_flags_p (SANITIZE_THREAD);
    }

  unsigned int execute (function *) final override { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
    }

  unsigned int execute (function *) final override { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}