/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
/* Keep track of forward references to immediate-escalating functions in
   case they become consteval.  This vector contains ADDR_EXPRs and
   PTRMEM_CSTs; it also stores FUNCTION_DECLs that had an escalating
   function call in them, to check that they can be evaluated to a constant,
   and immediate-escalating functions that may become consteval.  */

static GTY(()) hash_set<tree> *deferred_escalating_exprs;
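
/* An illustrative sketch (user code, not from this file) of what gets
   recorded here: given

     consteval int id (int i) { return i; }
     template <typename T> constexpr int f (T t) { return id (t); }
     auto p = &f<int>;   // forward reference remembered

   the ADDR_EXPR for &f<int> is stashed until we know whether f<int> is
   promoted to consteval, at which point taking its address is an error.  */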
static void
remember_escalating_expr (tree t)
{
  if (uses_template_parms (t))
    /* Templates don't escalate, and cp_fold_immediate can get confused by
       other template trees in the function body (c++/115986).  */
    return;
  if (!deferred_escalating_exprs)
    deferred_escalating_exprs = hash_set<tree>::create_ggc (37);
  deferred_escalating_exprs->add (t);
}
/* Flags for cp_fold and cp_fold_r.  */

enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
};

using fold_flags_t = int;

struct cp_fold_data
{
  hash_set<tree> pset;
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags) {}
};
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree, fold_flags_t);
static tree cp_fold_immediate_r (tree *, int *, void *);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
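
/* As a hedged illustration (user code, not from this file): for C++98 code
   such as

     void f () throw (int) { g (); }

   the body is wrapped roughly as

     try { g (); }
     catch (...) { <EH_FILTER allowed = {int}, failure = call unexpected> }

   using the TRY_CATCH_EXPR/EH_FILTER_EXPR pair built above.  */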
/* Return the first non-compound statement in STMT.  */

static tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      break;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      break;
    }
  return stmt;
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
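
/* For example (illustrative): `if constexpr (true) f (); else g ();` lowers
   to just the then-branch `f ();`, a consteval if discards its then-branch
   entirely (see above), and a plain `if (c) f (); else g ();` becomes
   COND_EXPR <c, f (), g ()> of void type.  */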
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
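
/* E.g. a statement like `x == 1;` has no side effects and a non-void type,
   so the code above warns "statement with no effect" before the statement
   is dropped.  Illustrative only.  */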
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	/* Make sure that we expected to elide this temporary.  But also allow
	   gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
			     || !TREE_ADDRESSABLE (TREE_TYPE (from)));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
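
/* Sketch (user code, not from this file): for
     struct A {};  A f ();  A a = f ();
   the copy of the empty class result is recognized here, so gimplification
   can evaluate the RHS only for its side-effects instead of emitting a real
   copy.  */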
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
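
/* For instance, given `volatile int v;`, the lvalue `v` carries
   TREE_SIDE_EFFECTS but merely naming it performs no access, so this returns
   false; for `a[f ()]` the ARRAY_REF index has side-effects, so it returns
   true.  */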
/* Return true if FN is an immediate-escalating function.  */

static bool
immediate_escalating_function_p (tree fn)
{
  if (!fn || !flag_immediate_escalation)
    return false;

  gcc_checking_assert (TREE_CODE (fn) == FUNCTION_DECL);

  if (DECL_IMMEDIATE_FUNCTION_P (fn))
    return false;

  /* An immediate-escalating function is
      -- the call operator of a lambda that is not declared with the consteval
	 specifier  */
  if (LAMBDA_FUNCTION_P (fn))
    return true;
  /* -- a defaulted special member function that is not declared with the
	consteval specifier  */
  special_function_kind sfk = special_memfn_p (fn);
  if (sfk != sfk_none && DECL_DEFAULTED_FN (fn))
    return true;
  /* -- a function that results from the instantiation of a templated entity
	defined with the constexpr specifier.  */
  return is_instantiation_of_constexpr (fn);
}
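
/* Illustrative user-code examples of the three bullets above (assuming a
   consteval function id exists; sketch only):

     auto l = [] (int i) { return id (i); };		     // lambda op()
     struct S { bool operator== (const S &) const = default; }; // defaulted SMF
     template <typename T> constexpr T h (T t) { return id (t); } // h<int>

   none of which is declared consteval, so each may escalate.  */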
/* Return true if FN is an immediate-escalating function that has not been
   checked for escalating expressions.  */

static bool
unchecked_immediate_escalating_function_p (tree fn)
{
  return (immediate_escalating_function_p (fn)
	  && !DECL_ESCALATION_CHECKED_P (fn));
}
/* Promote FN to an immediate function, including its clones.  */

static void
promote_function_to_consteval (tree fn)
{
  SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ESCALATION_CHECKED_P (fn) = true;
  tree clone;
  FOR_EACH_CLONE (clone, fn)
    {
      SET_DECL_IMMEDIATE_FUNCTION_P (clone);
      DECL_ESCALATION_CHECKED_P (clone) = true;
    }
}
/* A wrapper around cp_fold_immediate_r.  Return a non-null tree if
   we found a non-constant immediate function, or taking the address
   of an immediate function.  */

tree
cp_fold_immediate (tree *tp, mce_value manifestly_const_eval,
		   tree decl /*= current_function_decl*/)
{
  if (cxx_dialect <= cxx17)
    return NULL_TREE;

  temp_override<tree> cfd (current_function_decl, decl);

  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;

  cp_fold_data data (flags);
  int save_errorcount = errorcount;
  tree r = cp_walk_tree_without_duplicates (tp, cp_fold_immediate_r, &data);
  if (errorcount > save_errorcount)
    return integer_one_node;
  return r;
}
/* Maybe say that FN (a function decl with DECL_IMMEDIATE_FUNCTION_P set)
   was initially not an immediate function, but was promoted to one because
   its body contained an immediate-escalating expression or conversion.  */

void
maybe_explain_promoted_consteval (location_t loc, tree fn)
{
  if (DECL_ESCALATION_CHECKED_P (fn))
    {
      /* See if we can figure out what made the function consteval.  */
      tree x = cp_fold_immediate (&DECL_SAVED_TREE (fn), mce_unknown,
				  NULL_TREE);
      if (x)
	inform (cp_expr_loc_or_loc (x, loc),
		"%qD was promoted to an immediate function because its "
		"body contains an immediate-escalating expression %qE", fn, x);
      else
	inform (loc, "%qD was promoted to an immediate function", fn);
    }
}
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
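
/* Sketch: for an operator call rewritten as `op (a, bump ())` where `bump`
   could modify `a`, ORDERED is set for the earlier argument, so it is forced
   into a fresh temporary (or its TARGET_EXPR copy is kept) and the P0145
   evaluation-order guarantee for the operator is preserved.  Illustrative
   only.  */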
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	cp_fold_data data (ff_genericize | ff_mce_false);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (!decl)
	    break;
	  if (fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (*expr_p);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	  else if (fndecl_built_in_p (decl, BUILT_IN_CLZG, BUILT_IN_CTZG))
	    ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p,
							  post_p);
	  else
	    /* All consteval functions should have been processed by now.  */
	    gcc_checking_assert (!immediate_invocation_p (decl));
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
	 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
	 on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
			   || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
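
/* A small P0145 illustration for the MODIFY_EXPR preevaluation above
   (sketch, user code): in `b[bump ()] = f ();` the LHS ARRAY_REF index has
   side-effects and the RHS is a call, so the RHS is forced into a temporary
   first, keeping its value computation sequenced before the LHS.  */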
/* Return true if T is a PARM_DECL or RESULT_DECL that is passed by
   invisible reference.  */

bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
/* True if any of the element initializers in CTOR are TARGET_EXPRs that are
   not expected to elide, e.g. because unsafe_copy_elision_p is true.  */

static bool
any_non_eliding_target_exprs (tree ctor)
{
  for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
    {
      if (TREE_CODE (e.value) == TARGET_EXPR
	  && !TARGET_EXPR_ELIDING_P (e.value))
	return true;
    }
  return false;
}
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE
   with the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to, vec<tree,va_gc>** flags)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error, flags);
  else if (TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && ((flag_exceptions
		&& TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
	       || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
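
/* Sketch (user code): for `A arr[2] = { A (1), A (2) };` where ~A is
   non-trivial and exceptions are enabled, the CONSTRUCTOR is broken down by
   split_nonconstant_init so that a throw while building arr[1] still runs
   the cleanup for the already-constructed arr[0].  */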
/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to, nullptr);
}
/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  vec<tree, va_gc> *flags = make_tree_vector ();
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot, &flags);
  gcc_assert (!DECL_INITIAL (slot));
  for (tree f : flags)
    {
      /* Once initialization is complete TARGET_EXPR_CLEANUP becomes active, so
	 disable any subobject cleanups.  */
      tree d = build_disable_temp_cleanup (f);
      auto &r = TARGET_EXPR_INITIAL (*stmt_p);
      r = add_stmt_to_compound (r, d);
    }
  release_tree_vector (flags);
}
/* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
   TARGET_EXPR_INITIAL, and this also updates *_SLOT.  We need this extra
   replacement when cp_folding TARGET_EXPR to preserve the invariant that
   AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT.  */

static void
maybe_replace_decl (tree *tp, tree decl, tree replacement)
{
  if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
    return;
  tree t = *tp;
  while (TREE_CODE (t) == COMPOUND_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
  else if (TREE_CODE (t) == VEC_INIT_EXPR)
    replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
  else
    replace_decl (tp, decl, replacement);
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Emit an error about taking the address of an immediate function.
   EXPR is the whole expression; DECL is the immediate function.  */

static void
taking_address_of_imm_fn_error (tree expr, tree decl)
{
  auto_diagnostic_group d;
  const location_t loc = (TREE_CODE (expr) == PTRMEM_CST
			  ? PTRMEM_CST_LOCATION (expr)
			  : EXPR_LOCATION (expr));
  error_at (loc, "taking address of an immediate function %qD", decl);
  maybe_explain_promoted_consteval (loc, decl);
}
/* A subroutine of cp_fold_r to handle immediate functions.  */

static tree
cp_fold_immediate_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  auto data = static_cast<cp_fold_data *>(data_);
  tree stmt = *stmt_p;
  /* The purpose of this is not to emit errors for mce_unknown.  */
  const tsubst_flags_t complain = (data->flags & ff_mce_false
				   ? tf_error : tf_none);
  const tree_code code = TREE_CODE (stmt);

  /* No need to look into types or unevaluated operands.
     NB: This affects cp_fold_r as well.  */
  if (TYPE_P (stmt)
      || unevaluated_p (code)
      /* We do not use in_immediate_context here because it checks
	 more than is desirable, e.g., sk_template_parms.  */
      || cp_unevaluated_operand
      || (current_function_decl
	  && DECL_IMMEDIATE_FUNCTION_P (current_function_decl)))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  tree decl = NULL_TREE;
  bool call_p = false;

  /* We are looking for &fn or fn().  */
  switch (code)
    {
    case CALL_EXPR:
    case AGGR_INIT_EXPR:
      if (tree fn = cp_get_callee (stmt))
	if (TREE_CODE (fn) != ADDR_EXPR || ADDR_EXPR_DENOTES_CALL_P (fn))
	  decl = cp_get_fndecl_from_callee (fn, /*fold*/false);
      call_p = true;
      break;
    case PTRMEM_CST:
      decl = PTRMEM_CST_MEMBER (stmt);
      break;
    case ADDR_EXPR:
      if (!ADDR_EXPR_DENOTES_CALL_P (stmt))
	decl = TREE_OPERAND (stmt, 0);
      break;
    default:
      return NULL_TREE;
    }

  if (!decl || TREE_CODE (decl) != FUNCTION_DECL)
    return NULL_TREE;

  /* Fully escalate once all templates have been instantiated.  What we're
     calling is not a consteval function but it may become one.  This
     requires recursing; DECL may be promoted to consteval because it
     contains an escalating expression E, but E itself may have to be
     promoted first, etc.  */
  if (at_eof > 1 && unchecked_immediate_escalating_function_p (decl))
    {
      /* Set before the actual walk to avoid endless recursion.  */
      DECL_ESCALATION_CHECKED_P (decl) = true;
      /* We're only looking for the first escalating expression.  Let us not
	 walk more trees than necessary, hence mce_unknown.  */
      cp_fold_immediate (&DECL_SAVED_TREE (decl), mce_unknown, decl);
    }

  /* [expr.const]p16 "An expression or conversion is immediate-escalating if
     it is not initially in an immediate function context and it is either
     -- an immediate invocation that is not a constant expression and is not
     a subexpression of an immediate invocation."

     If we are in an immediate-escalating function, the immediate-escalating
     expression or conversion makes it an immediate function.  So STMT does
     not need to produce a constant expression.  */
  if (DECL_IMMEDIATE_FUNCTION_P (decl))
    {
      tree e = cxx_constant_value (stmt, tf_none);
      if (e == error_mark_node)
	{
	  /* This takes care of, e.g.,
	      template <typename T>
	      constexpr int f(T t)
	      {
		return id(t);
	      }
	     where id (consteval) causes f<int> to be promoted.  */
	  if (immediate_escalating_function_p (current_function_decl))
	    promote_function_to_consteval (current_function_decl);
	  else if (complain & tf_error)
	    {
	      if (call_p)
		{
		  auto_diagnostic_group d;
		  location_t loc = cp_expr_loc_or_input_loc (stmt);
		  error_at (loc, "call to consteval function %qE is "
			    "not a constant expression", stmt);
		  /* Explain why it's not a constant expression.  */
		  *stmt_p = cxx_constant_value (stmt, complain);
		  maybe_explain_promoted_consteval (loc, decl);
		}
	      else if (!data->pset.add (stmt))
		{
		  taking_address_of_imm_fn_error (stmt, decl);
		  *stmt_p = build_zero_cst (TREE_TYPE (stmt));
		}
	      /* If we're giving hard errors, continue the walk rather than
		 bailing out after the first error.  */
	      return NULL_TREE;
	    }
	  *walk_subtrees = 0;
	  return stmt;
	}
      /* We've evaluated the consteval function call.  */
      if (call_p)
	*stmt_p = e;
    }
  /* We've encountered a function call that may turn out to be consteval
     later.  Store its caller so that we can ensure that the call is
     a constant expression.  */
  else if (unchecked_immediate_escalating_function_p (decl))
    {
      /* Make sure we're not inserting new elements while walking
	 the deferred_escalating_exprs hash table; if we are, it's
	 likely that a function wasn't properly marked checked for
	 us to proceed.  */
      gcc_checking_assert (at_eof <= 1);
      if (current_function_decl)
	remember_escalating_expr (current_function_decl);
      /* auto p = &f<int>; in the global scope won't be ensconced in
	 a function we could store for later at this point.  (If there's
	 no c_f_d at this point and we're dealing with a call, we should
	 see the call when cp_fold_function __static_i_and_d.)  */
      else if (!call_p)
	remember_escalating_expr (stmt);
    }

  return NULL_TREE;
}
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.

   ??? This is algorithmically weird because walk_tree works in pre-order, so
   we see outer expressions before inner expressions.  This isn't as much of an
   issue because cp_fold recurses into subexpressions in many cases, but then
   walk_tree walks back into those subexpressions again.  We avoid the
   resulting complexity problem by caching the result of cp_fold, but it's
   inelegant.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  if (cxx_dialect >= cxx20)
    {
      /* Unfortunately we must handle code like
	   false ? bar () : 42
	 where we have to check bar too.  The cp_fold call below could
	 fold the ?: into a constant before we've checked it.  */
      if (code == COND_EXPR)
	{
	  auto then_fn = cp_fold_r, else_fn = cp_fold_r;
	  /* See if we can figure out if either of the branches is dead.  If it
	     is, we don't need to do everything that cp_fold_r does.  */
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_fold_r, data, nullptr);
	  if (integer_zerop (TREE_OPERAND (stmt, 0)))
	    then_fn = cp_fold_immediate_r;
	  else if (integer_nonzerop (TREE_OPERAND (stmt, 0)))
	    else_fn = cp_fold_immediate_r;

	  if (TREE_OPERAND (stmt, 1))
	    cp_walk_tree (&TREE_OPERAND (stmt, 1), then_fn, data,
			  nullptr);
	  if (TREE_OPERAND (stmt, 2))
	    cp_walk_tree (&TREE_OPERAND (stmt, 2), else_fn, data,
			  nullptr);
	  *walk_subtrees = 0;
	  /* Don't return yet, still need the cp_fold below.  */
	}
      else
	cp_fold_immediate_r (stmt_p, walk_subtrees, data);
    }

  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  /* For certain trees, like +foo(), the cp_fold above will remove the +,
     and the subsequent tree walk would go straight down to the CALL_EXPR's
     operands, meaning that cp_fold_immediate_r would never see the
     CALL_EXPR.  Ew :(.  */
  if (TREE_CODE (stmt) == CALL_EXPR && code != CALL_EXPR)
    cp_fold_immediate_r (stmt_p, walk_subtrees, data);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
	 here rather than in cp_genericize to avoid problems with the invisible
	 reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	 that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      tree sub = TARGET_EXPR_INITIAL (init);
	      maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
				  TARGET_EXPR_SLOT (stmt));
	      init = sub;
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  /* By now all manifestly-constant-evaluated expressions will have
     been constant-evaluated already if possible, so we can safely
     pass ff_mce_false.  */
  cp_fold_data data (ff_genericize | ff_mce_false);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);

  /* This is merely an optimization: if FNDECL has no i-e expressions,
     we'll not save c_f_d, and we can safely say that FNDECL will not
     be promoted to consteval.  */
  if (deferred_escalating_exprs
      && !deferred_escalating_exprs->contains (current_function_decl))
    DECL_ESCALATION_CHECKED_P (fndecl) = true;
}
/* We've stashed immediate-escalating functions.  Now see if they indeed
   ought to be promoted to consteval.  */

void
process_and_check_pending_immediate_escalating_fns ()
{
  /* This will be null for -fno-immediate-escalation.  */
  if (!deferred_escalating_exprs)
    return;

  for (auto e : *deferred_escalating_exprs)
    if (TREE_CODE (e) == FUNCTION_DECL && !DECL_ESCALATION_CHECKED_P (e))
      cp_fold_immediate (&DECL_SAVED_TREE (e), mce_false, e);

  /* We've escalated every function that could have been promoted to
     consteval.  Check that we are not taking the address of a consteval
     function.  */
  for (auto e : *deferred_escalating_exprs)
    {
      if (TREE_CODE (e) == FUNCTION_DECL)
	continue;
      tree decl = (TREE_CODE (e) == PTRMEM_CST
		   ? PTRMEM_CST_MEMBER (e)
		   : TREE_OPERAND (e, 0));
      if (DECL_IMMEDIATE_FUNCTION_P (decl))
	taking_address_of_imm_fn_error (e, decl);
    }

  deferred_escalating_exprs = nullptr;
}
/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
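
/* E.g. a user expression `x <=> y` still represented as SPACESHIP_EXPR at
   this point is handed to the location-taking genericize_spaceship overload,
   which produces the equivalent GENERIC comparison sequence.  Illustrative
   only.  */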
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
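
/* Sketch of the c++/88256 situation (user code): in

     void f (int n) { int (*p)[n] = 0; }

   the anonymous VLA type int[n] gets an artificial TYPE_DECL wrapped in a
   DECL_EXPR prepended, so gimplify_type_sizes sees the type before the
   pointer-to-VLA cast uses it.  */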
1600 /* Perform any pre-gimplification lowering of C++ front end trees to
1604 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1606 tree stmt
= *stmt_p
;
1607 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1608 hash_set
<tree
> *p_set
= wtd
->p_set
;
1610 /* If in an OpenMP context, note var uses. */
1611 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1613 || TREE_CODE (stmt
) == PARM_DECL
1614 || TREE_CODE (stmt
) == RESULT_DECL
)
1615 && omp_var_to_track (stmt
))
1616 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1618 /* Don't dereference parms in a thunk, pass the references through. */
1619 if ((TREE_CODE (stmt
) == CALL_EXPR
&& call_from_lambda_thunk_p (stmt
))
1620 || (TREE_CODE (stmt
) == AGGR_INIT_EXPR
&& AGGR_INIT_FROM_THUNK_P (stmt
)))
1626 /* Dereference invisible reference parms. */
1627 if (wtd
->handle_invisiref_parm_p
&& is_invisiref_parm (stmt
))
1629 *stmt_p
= convert_from_reference (stmt
);
1630 p_set
->add (*stmt_p
);
1635 /* Map block scope extern declarations to visible declarations with the
1636 same name and type in outer scopes if any. */
1637 if (VAR_OR_FUNCTION_DECL_P (stmt
) && DECL_LOCAL_DECL_P (stmt
))
1638 if (tree alias
= DECL_LOCAL_DECL_ALIAS (stmt
))
1640 if (alias
!= error_mark_node
)
1643 TREE_USED (alias
) |= TREE_USED (stmt
);
1649 if (TREE_CODE (stmt
) == INTEGER_CST
1650 && TYPE_REF_P (TREE_TYPE (stmt
))
1651 && (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1652 && !wtd
->no_sanitize_p
)
1654 ubsan_maybe_instrument_reference (stmt_p
);
1655 if (*stmt_p
!= stmt
)
1662 /* Other than invisiref parms, don't walk the same tree twice. */
1663 if (p_set
->contains (stmt
))
1669 switch (TREE_CODE (stmt
))
1672 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1674 /* If in an OpenMP context, note var uses. */
1675 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1676 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1677 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1678 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1684 if (TREE_OPERAND (stmt
, 0))
1686 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1687 /* Don't dereference an invisiref RESULT_DECL inside a
1690 if (RETURN_EXPR_LOCAL_ADDR_P (stmt
))
1692 /* Don't return the address of a local variable. */
1693 tree
*p
= &TREE_OPERAND (stmt
, 0);
1694 while (TREE_CODE (*p
) == COMPOUND_EXPR
)
1695 p
= &TREE_OPERAND (*p
, 0);
1696 if (TREE_CODE (*p
) == INIT_EXPR
)
1698 tree op
= TREE_OPERAND (*p
, 1);
1699 tree new_op
= build2 (COMPOUND_EXPR
, TREE_TYPE (op
), op
,
1700 build_zero_cst (TREE_TYPE (op
)));
1701 TREE_OPERAND (*p
, 1) = new_op
;
1708 switch (OMP_CLAUSE_CODE (stmt
))
1710 case OMP_CLAUSE_LASTPRIVATE
:
1711 /* Don't dereference an invisiref in OpenMP clauses. */
1712 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1715 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1716 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1717 cp_genericize_r
, data
, NULL
);
1720 case OMP_CLAUSE_PRIVATE
:
1721 /* Don't dereference an invisiref in OpenMP clauses. */
1722 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1724 else if (wtd
->omp_ctx
!= NULL
)
1726 /* Private clause doesn't cause any references to the
1727 var in outer contexts, avoid calling
1728 omp_cxx_notice_variable for it. */
1729 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1730 wtd
->omp_ctx
= NULL
;
1731 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1737 case OMP_CLAUSE_SHARED
:
1738 case OMP_CLAUSE_FIRSTPRIVATE
:
1739 case OMP_CLAUSE_COPYIN
:
1740 case OMP_CLAUSE_COPYPRIVATE
:
1741 case OMP_CLAUSE_INCLUSIVE
:
1742 case OMP_CLAUSE_EXCLUSIVE
:
1743 /* Don't dereference an invisiref in OpenMP clauses. */
1744 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1747 case OMP_CLAUSE_REDUCTION
:
1748 case OMP_CLAUSE_IN_REDUCTION
:
1749 case OMP_CLAUSE_TASK_REDUCTION
:
1750 /* Don't dereference an invisiref in reduction clause's
1751 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1752 still needs to be genericized. */
1753 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1756 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1757 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1758 cp_genericize_r
, data
, NULL
);
1759 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1760 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1761 cp_genericize_r
, data
, NULL
);
1769 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1770 to lower this construct before scanning it, so we need to lower these
1771 before doing anything else. */
1773 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1774 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1777 CLEANUP_BODY (stmt
),
1778 CLEANUP_EXPR (stmt
));
1782 genericize_if_stmt (stmt_p
);
1783 /* *stmt_p has changed, tail recurse to handle it again. */
1784 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1786 /* COND_EXPR might have incompatible types in branches if one or both
1787 arms are bitfields. Fix it up now. */
1791 = (TREE_OPERAND (stmt
, 1)
1792 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1795 = (TREE_OPERAND (stmt
, 2)
1796 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1799 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1800 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1802 TREE_OPERAND (stmt
, 1)
1803 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1804 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1808 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1809 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1811 TREE_OPERAND (stmt
, 2)
1812 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1813 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1820 if (UNLIKELY (wtd
->omp_ctx
!= NULL
))
1823 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1825 && !DECL_EXTERNAL (decl
)
1826 && omp_var_to_track (decl
))
1829 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1830 (splay_tree_key
) decl
);
1832 splay_tree_insert (wtd
->omp_ctx
->variables
,
1833 (splay_tree_key
) decl
,
1835 ? OMP_CLAUSE_DEFAULT_SHARED
1836 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1839 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1841 /* The point here is to not sanitize static initializers. */
1842 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1843 wtd
->no_sanitize_p
= true;
1844 for (tree decl
= BIND_EXPR_VARS (stmt
);
1846 decl
= DECL_CHAIN (decl
))
1848 && TREE_STATIC (decl
)
1849 && DECL_INITIAL (decl
))
1850 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1851 wtd
->no_sanitize_p
= no_sanitize_p
;
1853 wtd
->bind_expr_stack
.safe_push (stmt
);
1854 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1855 cp_genericize_r
, data
, NULL
);
1856 wtd
->bind_expr_stack
.pop ();
1859 case ASSERTION_STMT
:
1860 case PRECONDITION_STMT
:
1861 case POSTCONDITION_STMT
:
1863 if (tree check
= build_contract_check (stmt
))
1866 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1869 /* If we didn't build a check, replace it with void_node so we don't
1870 leak contracts into GENERIC. */
1871 *stmt_p
= void_node
;
1878 tree block
= NULL_TREE
;
1880 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1881 BLOCK, and append an IMPORTED_DECL to its
1882 BLOCK_VARS chained list. */
1883 if (wtd
->bind_expr_stack
.exists ())
1886 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1887 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1892 tree decl
= TREE_OPERAND (stmt
, 0);
1895 if (undeduced_auto_decl (decl
))
1896 /* Omit from the GENERIC, the back-end can't handle it. */;
1899 tree using_directive
= make_node (IMPORTED_DECL
);
1900 TREE_TYPE (using_directive
) = void_type_node
;
1901 DECL_CONTEXT (using_directive
) = current_function_decl
;
1902 DECL_SOURCE_LOCATION (using_directive
)
1903 = cp_expr_loc_or_input_loc (stmt
);
1905 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
) = decl
;
1906 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1907 BLOCK_VARS (block
) = using_directive
;
1910 /* The USING_STMT won't appear in GENERIC. */
1911 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1917 if (TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1919 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1920 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1925 tree d
= DECL_EXPR_DECL (stmt
);
1927 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1935 struct cp_genericize_omp_taskreg omp_ctx
;
1940 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1941 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1942 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1943 omp_ctx
.outer
= wtd
->omp_ctx
;
1944 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1945 wtd
->omp_ctx
= &omp_ctx
;
1946 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1947 switch (OMP_CLAUSE_CODE (c
))
1949 case OMP_CLAUSE_SHARED
:
1950 case OMP_CLAUSE_PRIVATE
:
1951 case OMP_CLAUSE_FIRSTPRIVATE
:
1952 case OMP_CLAUSE_LASTPRIVATE
:
1953 decl
= OMP_CLAUSE_DECL (c
);
1954 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1956 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1959 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1960 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1961 ? OMP_CLAUSE_DEFAULT_SHARED
1962 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1963 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
&& omp_ctx
.outer
)
1964 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1966 case OMP_CLAUSE_DEFAULT
:
1967 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1968 omp_ctx
.default_shared
= true;
1972 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1973 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
1974 cp_genericize_r
, cp_walk_subtrees
);
1976 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1977 wtd
->omp_ctx
= omp_ctx
.outer
;
1978 splay_tree_delete (omp_ctx
.variables
);
1983 cfun
->has_omp_target
= true;
1989 tree try_block
= wtd
->try_block
;
1990 wtd
->try_block
= stmt
;
1991 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1992 wtd
->try_block
= try_block
;
1993 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1997 case MUST_NOT_THROW_EXPR
:
1998 /* MUST_NOT_THROW_COND might be something else with TM. */
1999 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
2002 tree try_block
= wtd
->try_block
;
2003 wtd
->try_block
= stmt
;
2004 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
2005 wtd
->try_block
= try_block
;
2011 location_t loc
= location_of (stmt
);
2012 if (warning_suppressed_p (stmt
/* What warning? */))
2014 else if (wtd
->try_block
)
2016 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
)
2018 auto_diagnostic_group d
;
2019 if (warning_at (loc
, OPT_Wterminate
,
2020 "%<throw%> will always call %<terminate%>")
2021 && cxx_dialect
>= cxx11
2022 && DECL_DESTRUCTOR_P (current_function_decl
))
2023 inform (loc
, "in C++11 destructors default to %<noexcept%>");
2028 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
2029 && DECL_DESTRUCTOR_P (current_function_decl
)
2030 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
2032 && (get_defaulted_eh_spec (current_function_decl
)
2033 == empty_except_spec
))
2034 warning_at (loc
, OPT_Wc__11_compat
,
2035 "in C++11 this %<throw%> will call %<terminate%> "
2036 "because destructors default to %<noexcept%>");
2042 gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt
)));
2043 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
2046 case SPACESHIP_EXPR
:
2047 *stmt_p
= genericize_spaceship (*stmt_p
);
2051 /* By the time we get here we're handing off to the back end, so we don't
2052 need or want to preserve PTRMEM_CST anymore. */
2053 *stmt_p
= cplus_expand_constant (stmt
);
    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);

      /* Warn of new allocations that are not big enough for the target
	 type.  */
      if (warn_alloc_size
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == CALL_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (stmt)))
	{
	  if (tree fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 0)))
	    if (DECL_IS_MALLOC (fndecl))
	      {
		tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
		tree alloc_size = lookup_attribute ("alloc_size", attrs);
		if (alloc_size)
		  warn_for_alloc_size (EXPR_LOCATION (stmt),
				       TREE_TYPE (TREE_TYPE (stmt)),
				       TREE_OPERAND (stmt, 0), alloc_size);
	      }
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;
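
    /* As an illustration (example code, not part of GCC itself): with
       -Walloc-size, a NOP_EXPR casting the result of a malloc-style call
       to a pointer to a larger type, e.g.

	 struct S { int a[4]; };
	 struct S *p = (struct S *) malloc (2);

       reaches the code above, which uses the callee's "alloc_size"
       attribute to diagnose the undersized allocation.  */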
    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;
    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      break;
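
    /* As an illustration (example code, not part of GCC itself):

	 template<typename T> concept Small = sizeof (T) <= sizeof (int);
	 bool b = Small<char>;

       the concept-id Small<char> is a TEMPLATE_ID_EXPR concept check and
       is replaced above by its constant truth value.  */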
    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
	      if (TREE_VEC_ELT (OMP_FOR_INIT (inner), i)
		  && OMP_FOR_ORIG_DECLS (inner)
		  && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
					      i)) == TREE_LIST
		  && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						 i)))
		{
		  tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		  /* Class iterators aren't allowed on OMP_SIMD, so the only
		     case we need to solve is distribute parallel for.  */
		  gcc_assert (TREE_CODE (inner) == OMP_FOR
			      && data[1]);
		  tree orig_decl = TREE_PURPOSE (orig);
		  tree c, cl = NULL_TREE;
		  for (c = OMP_FOR_CLAUSES (inner);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			&& OMP_CLAUSE_DECL (c) == orig_decl)
		      {
			cl = c;
			break;
		      }
		  if (cl == NULL_TREE)
		    {
		      for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			   c; c = OMP_CLAUSE_CHAIN (c))
			if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			    && OMP_CLAUSE_DECL (c) == orig_decl)
			  {
			    cl = c;
			    break;
			  }
		    }
		  if (cl)
		    {
		      orig_decl = require_complete_type (orig_decl);
		      tree inner_type = TREE_TYPE (orig_decl);
		      if (orig_decl == error_mark_node)
			continue;
		      if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			inner_type = TREE_TYPE (inner_type);
		      while (TREE_CODE (inner_type) == ARRAY_TYPE)
			inner_type = TREE_TYPE (inner_type);
		      get_copy_ctor (inner_type, tf_warning_or_error);
		    }
		}
	}
      /* FALLTHRU */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree *t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
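
/* As an illustration (example code, not part of GCC itself):

     int f (bool b)
     {
       if (b)
	 return 1;
     }

   control can flow off the end of f, so cp_maybe_instrument_return
   appends either a -fsanitize=return diagnostic call or a
   __builtin_unreachable () to the end of the body.  */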
/* Genericize the body of FNDECL.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
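
/* As an illustration (example code, not part of GCC itself): for

     struct T { T (const T &); int i; };
     T f (T t) { return t; }

   both the parameter and the return value have TREE_ADDRESSABLE class
   type, so the code above rewrites them to be passed by invisible
   reference, setting DECL_BY_REFERENCE.  */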
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }

  argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
  if (arg2)
    argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
  /* Handle default arguments.  */
  for (parm = defparm; parm && parm != void_list_node;
       parm = TREE_CHAIN (parm), i++)
    argarray[i] = convert_default_arg (TREE_VALUE (parm),
				       TREE_PURPOSE (parm), fn,
				       i - is_method, tf_warning_or_error);
  t = build_call_a (fn, i, argarray);
  if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
    t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
  t = fold_convert (void_type_node, t);
  return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
}
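
/* Roughly, for array operands the function above builds GENERIC along
   the lines of the following sketch (illustrative pseudocode only):

     p1 = &arg1[0]...[0];
     end1 = p1 + sizeof (arg1);
     if arg2: p2 = &arg2[0]...[0];
   lab:
     fn (p1 [, p2] [, default args]);
     p1 += sizeof (element);
     if arg2: p2 += sizeof (element);
     if (p1 != end1) goto lab;  */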
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
	  || is_invisiref_parm (decl));
}
/* Return true if DECL is a const-qualified variable having no mutable
   member.  */

static bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
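
/* As an illustration (example code, not part of GCC itself):

     struct A { int i; };
     struct B { mutable int i; };
     const A a {};
     const B b {};

   cxx_omp_const_qual_no_mutable is true for 'a' but false for 'b',
   whose mutable member keeps it writable despite the const.  */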
/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
   of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;

      if (c_omp_predefined_variable (decl))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* `this' may not be specified in data-sharing clauses, still we need
     to predetermine it firstprivate.  */
  if (decl == current_class_ptr)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
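
/* As an illustration (example code, not part of GCC itself): given

     struct S { static int ctr; };

   a reference to S::ctr inside an OpenMP region is predetermined shared
   by the function above, while the enclosing member function's `this'
   pointer is predetermined firstprivate.  */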
/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Similarly for typeinfo symbols.  */
  if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
enum omp_clause_defaultmap_kind
cxx_omp_predetermined_mapping (tree decl)
{
  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;

  if (c_omp_predefined_variable (decl))
    return OMP_CLAUSE_DEFAULTMAP_TO;

  return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  if (shared)
    return false;
  if (VAR_P (decl)
      && DECL_HAS_VALUE_EXPR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && DECL_OMP_PRIVATIZED_MEMBER (decl))
    return true;
  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
    return true;
  return false;
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
{
  while (true)
    {
      x = cp_fold (x, flags);
      if (rval)
	x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  return cp_fold_maybe_rvalue (x, rval, ff_none);
}

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x, fold_flags_t flags)
{
  return cp_fold_maybe_rvalue (x, true, flags);
}

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_rvalue (x, ff_none);
}
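
/* As an illustration (example code, not part of GCC itself): in

     const int k = 42;
     int x = k;

   folding the rvalue use of k substitutes its constant initializer via
   decl_constant_value, and the loop above then retries cp_fold on the
   replacement value.  */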
/* Perform folding on expression X.  */

static tree
cp_fully_fold (tree x, mce_value manifestly_const_eval)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;
  return cp_fold_rvalue (x, flags);
}

tree
cp_fully_fold (tree x)
{
  return cp_fully_fold (x, mce_unknown);
}
/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
   in some cases.  */

tree
cp_fully_fold_init (tree x)
{
  if (processing_template_decl)
    return x;
  x = cp_fully_fold (x, mce_false);
  cp_fold_data data (ff_mce_false);
  cp_walk_tree (&x, cp_fold_r, &data, NULL);
  return x;
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}
static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];

/* Subroutine of cp_fold.  Returns which fold cache to use according
   to the given flags.  We need multiple caches since the result of
   folding may depend on which flags are used.  */

static hash_map<tree, tree> *&
get_fold_cache (fold_flags_t flags)
{
  if (flags & ff_mce_false)
    return fold_caches[1];
  else
    return fold_caches[0];
}

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  for (auto &fold_cache : fold_caches)
    if (fold_cache != NULL)
      fold_cache->empty ();
}
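
/* For example, __builtin_is_constant_evaluated () legitimately folds to
   different values depending on ff_mce_false, so a single shared cache
   could return stale results; hence one cache per flag combination.  */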
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x, fold_flags_t flags)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  auto &fold_cache = get_fold_cache (flags);
  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    {
      /* unshare_expr doesn't recurse into SAVE_EXPRs.  If SAVE_EXPR's
	 argument has been folded into a tree invariant, make sure it is
	 unshared.  See PR112727.  */
      if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
	return unshare_expr (*cached);
      return *cached;
    }

  uid_sensitive_constexpr_evaluation_checker c;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    CASE_CONVERT:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
	     folding of the operand should be in the caches and if in cp_fold_r
	     it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;
      break;
    case EXCESS_PRECISION_EXPR:
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
      x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  if (p != x)
	    return cp_fold (p, flags);
	}
      goto unary;
    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      val = maybe_constant_value (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;
    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);

      /* decltype(nullptr) has only one value, so optimize away all comparisons
	 with that type right away, keeping them in the IL causes troubles for
	 various optimizations.  */
      if (COMPARISON_CLASS_P (org_x)
	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case LE_EXPR:
	    case GE_EXPR:
	      x = constant_boolean_node (true, TREE_TYPE (x));
	      break;
	    case NE_EXPR:
	    case LT_EXPR:
	    case GT_EXPR:
	      x = constant_boolean_node (false, TREE_TYPE (x));
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
					op0, op1);
	}

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    {
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  else
	    x = org_x;
	}

      break;
    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
      op2 = cp_fold (TREE_OPERAND (x, 2), flags);

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;
    case CALL_EXPR:
      {
	tree callee = get_callee_fndecl (x);

	/* "Inline" calls to std::move/forward and other cast-like functions
	   by simply folding them into a corresponding cast to their return
	   type.  This is cheaper than relying on the middle end to do so, and
	   also means we avoid generating useless debug info for them at all.

	   At this point the argument has already been converted into a
	   reference, so it suffices to use a NOP_EXPR to express the
	   cast.  */
	if ((OPTION_SET_P (flag_fold_simple_inlines)
	     ? flag_fold_simple_inlines
	     : !flag_no_inline)
	    && call_expr_nargs (x) == 1
	    && decl_in_std_namespace_p (callee)
	    && DECL_NAME (callee) != NULL_TREE
	    && (id_equal (DECL_NAME (callee), "move")
		|| id_equal (DECL_NAME (callee), "forward")
		|| id_equal (DECL_NAME (callee), "forward_like")
		|| id_equal (DECL_NAME (callee), "addressof")
		/* This addressof equivalent is used heavily in libstdc++.  */
		|| id_equal (DECL_NAME (callee), "__addressof")
		|| id_equal (DECL_NAME (callee), "as_const")))
	  {
	    r = CALL_EXPR_ARG (x, 0);
	    /* Check that the return and argument types are sane before
	       folding.  */
	    if (INDIRECT_TYPE_P (TREE_TYPE (x))
		&& INDIRECT_TYPE_P (TREE_TYPE (r)))
	      {
		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
		  r = build_nop (TREE_TYPE (x), r);
		x = cp_fold (r, flags);
		break;
	      }
	  }

	int sv = optimize, nw = sv;

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && fndecl_built_in_p (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
	  {
	    iloc_sentinel ils (EXPR_LOCATION (x));
	    switch (DECL_FE_FUNCTION_CODE (callee))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		/* Defer folding __builtin_is_constant_evaluated unless
		   we know this isn't a manifestly constant-evaluated
		   context.  */
		if (flags & ff_mce_false)
		  x = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		x = fold_builtin_source_location (x);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		x = fold_builtin_is_corresponding_member
		      (EXPR_LOCATION (x), call_expr_nargs (x),
		       &CALL_EXPR_ARG (x, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		x = fold_builtin_is_pointer_inverconvertible_with_class
		      (EXPR_LOCATION (x), call_expr_nargs (x),
		       &CALL_EXPR_ARG (x, 0));
		break;
	      default:
		break;
	      }
	    break;
	  }

	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
				  BUILT_IN_FRONTEND))
	  {
	    x = fold_builtin_source_location (x);
	    break;
	  }

	bool changed = false;
	int m = call_expr_nargs (x);
	for (int i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		if (!changed)
		  x = copy_node (x);
		CALL_EXPR_ARG (x, i) = r;
		changed = true;
	      }
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r, flags);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  {
	    mce_value manifestly_const_eval = mce_unknown;
	    if (flags & ff_mce_false)
	      /* Allow folding __builtin_is_constant_evaluated to false during
		 constexpr evaluation of this call.  */
	      manifestly_const_eval = mce_false;
	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
				      manifestly_const_eval);
	  }
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree a = CALL_EXPR_ARG (x, 0);
		bool return_this = targetm.cxx.cdtor_returns_this ();
		if (return_this)
		  a = cp_save_expr (a);
		tree s = build_fold_indirect_ref_loc (loc, a);
		r = cp_build_init_expr (s, r);
		if (return_this)
		  r = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (x), r,
				  fold_convert_loc (loc, TREE_TYPE (x), a));
	      }
	    x = r;
	    break;
	  }

	break;
      }
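
    /* As an illustration of the std::move/forward folding above (example
       code, not part of GCC itself):

	 int g (int &&);
	 int h (int &x) { return g (std::move (x)); }

       the call to std::move is replaced by a mere cast of the argument
       to the call's reference return type, leaving no CALL_EXPR in the
       IR.  */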
    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value, flags);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	    CONSTRUCTOR_MUTABLE_POISON (x)
	      = CONSTRUCTOR_MUTABLE_POISON (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	int n = TREE_VEC_LENGTH (x);

	for (int i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
	    if (op != TREE_VEC_ELT (x, i))
	      {
		if (!changed)
		  x = copy_node (x);
		TREE_VEC_ELT (x, i) = op;
		changed = true;
	      }
	  }
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
      op3 = cp_fold (TREE_OPERAND (x, 3), flags);

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}
      else
	x = fold (x);

      break;
    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0), flags);
      if (tree_invariant_p (r))
	x = r;
      break;

    case REQUIRES_EXPR:
      x = evaluate_requires_expr (x);
      break;

    default:
      return org_x;
    }

  if (EXPR_P (x) && TREE_CODE (x) == code)
    {
      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
      copy_warning (x, org_x);
    }

  if (!c.evaluation_restricted_p ())
    {
      fold_cache->put (org_x, x);
      /* Prevent that we try to fold an already folded result again.  */
      if (x != org_x)
	fold_cache->put (x, x);
    }

  return x;
}
/* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */

static tree
lookup_hotness_attribute (tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree name = get_attribute_name (list);
      if ((is_attribute_p ("hot", name)
	   || is_attribute_p ("cold", name)
	   || is_attribute_p ("likely", name)
	   || is_attribute_p ("unlikely", name))
	  && is_attribute_namespace_p ("", list))
	break;
    }
  return list;
}
/* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST.  */

static tree
remove_hotness_attribute (tree list)
{
  for (tree *p = &list; *p; )
    {
      tree l = *p;
      tree name = get_attribute_name (l);
      if ((is_attribute_p ("hot", name)
	   || is_attribute_p ("cold", name)
	   || is_attribute_p ("likely", name)
	   || is_attribute_p ("unlikely", name))
	  && is_attribute_namespace_p ("", l))
	{
	  *p = TREE_CHAIN (l);
	  continue;
	}
      p = &TREE_CHAIN (l);
    }
  return list;
}
/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
   PREDICT_EXPR.  */

tree
process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  if (tree attr = lookup_hotness_attribute (std_attrs))
    {
      tree name = get_attribute_name (attr);
      bool hot = (is_attribute_p ("hot", name)
		  || is_attribute_p ("likely", name));
      tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
				      hot ? TAKEN : NOT_TAKEN);
      SET_EXPR_LOCATION (pred, attrs_loc);
      add_stmt (pred);
      if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
		 get_attribute_name (other), name);
      std_attrs = remove_hotness_attribute (std_attrs);
    }
  return std_attrs;
}
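
/* As an illustration (example code, not part of GCC itself):

     if (x > 0)
       [[likely]] f ();
     else
       [[unlikely]] g ();

   each hotness attribute becomes a PREDICT_EXPR (PRED_HOT_LABEL/TAKEN
   or PRED_COLD_LABEL/NOT_TAKEN) emitted ahead of the marked statement.  */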
/* Build IFN_ASSUME internal call for assume condition ARG.  */

tree
build_assume_call (location_t loc, tree arg)
{
  if (!processing_template_decl)
    arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
  return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
				       1, arg);
}
/* If [[assume (cond)]] appears on this statement, handle it.  */

tree
process_stmt_assume_attribute (tree std_attrs, tree statement,
			       location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  tree attr = lookup_attribute ("gnu", "assume", std_attrs);
  if (attr == NULL_TREE)
    return std_attrs;
  /* The next token after the assume attribute is not ';'.  */
  if (statement)
    {
      warning_at (attrs_loc, OPT_Wattributes,
		  "%<assume%> attribute not followed by %<;%>");
      attr = NULL_TREE;
    }
  for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
    {
      tree args = TREE_VALUE (attr);
      if (args && PACK_EXPANSION_P (args))
	{
	  auto_diagnostic_group d;
	  error_at (attrs_loc, "pack expansion of %qE attribute",
		    get_attribute_name (attr));
	  if (cxx_dialect >= cxx17)
	    inform (attrs_loc, "use fold expression in the attribute "
			       "argument instead");
	  continue;
	}
      int nargs = list_length (args);
      if (nargs != 1)
	{
	  auto_diagnostic_group d;
	  error_at (attrs_loc, "wrong number of arguments specified for "
			       "%qE attribute", get_attribute_name (attr));
	  inform (attrs_loc, "expected %i, found %i", 1, nargs);
	}
      else
	{
	  tree arg = TREE_VALUE (args);
	  if (!type_dependent_expression_p (arg))
	    arg = contextual_conv_bool (arg, tf_warning_or_error);
	  if (error_operand_p (arg))
	    continue;
	  finish_expr_stmt (build_assume_call (attrs_loc, arg));
	}
    }
  return remove_attribute ("gnu", "assume", std_attrs);
}
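
/* As an illustration (example code, not part of GCC itself):

     void f (int n)
     {
       [[assume (n > 0)]];
     }

   the condition is converted to bool and wrapped in an IFN_ASSUME
   internal call, letting the optimizers assume n > 0 without ever
   evaluating the expression at run time.  */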
/* Return the type std::source_location::__impl after performing
   verification on it.  */

tree
get_source_location_impl_type ()
{
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error ("%qD is not a class type", decl);
      return error_mark_node;
    }

  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error ("%qD does not have %<const char *%> type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error ("%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error ("%<std::source_location::__impl%> does not contain only "
	     "non-static data members %<_M_file_name%>, "
	     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
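
/* The layout verified above corresponds to the libstdc++ definition,
   roughly (illustrative only):

     struct source_location::__impl
     {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line;
       unsigned _M_column;
     };  */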
/* Type for source_location_table hash_set.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;
  unsigned uid;
  tree var;
};

/* Traits class for function start hash maps below.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }

  static void
  pch_nx (source_location_table_entry &p)
  {
    extern void gt_pch_nx (source_location_table_entry &);
    gt_pch_nx (p);
  }

  static void
  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
  {
    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
			   void *);
    gt_pch_nx (&p, op, cookie);
  }
};

static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
static GTY(()) unsigned int source_location_id;
/* Fold the __builtin_source_location () call T.  */

tree
fold_builtin_source_location (const_tree t)
{
  gcc_assert (TREE_CODE (t) == CALL_EXPR);
  /* TREE_TYPE (t) is const std::source_location::__impl*  */
  tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  gcc_assert (CLASS_TYPE_P (source_location_impl)
	      && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));

  location_t loc = EXPR_LOCATION (t);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      vec <constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_aggregate_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (fname);
		}
	      else
		val = build_string_literal ("");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      const char *name = "";

	      if (current_function_decl)
		{
		  /* If this is a coroutine, we should get the name of the user
		     function rather than the actor we generate.  */
		  if (tree ramp = DECL_RAMP_FN (current_function_decl))
		    name = cxx_printable_name (ramp, 2);
		  else
		    name = cxx_printable_name (current_function_decl, 2);
		}

	      val = build_string_literal (name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      entryp->loc = entry.loc;
      entryp->uid = entry.uid;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
}
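
/* As an illustration (example code, not part of GCC itself): for

     auto loc = std::source_location::current ();

   the library implements current () via __builtin_source_location (),
   which the function above folds to the address of a static constant
   __impl object holding the file, function, line and column.  */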
3960 #include "gt-cp-cp-gimplify.h"