1 /* Tree inlining.
2 Copyright (C) 2001-2024 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-iterator.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
68 #include "asan.h"
70 /* I'm not really happy about this, but we need to handle gimple and
71 non-gimple trees. */
73 /* Inlining, Cloning, Versioning, Parallelization
75 Inlining: a function body is duplicated, but the PARM_DECLs are
76 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
77 MODIFY_EXPRs that store to a dedicated returned-value variable.
78 The duplicated eh_region info of the copy will later be appended
79 to the info for the caller; the eh_region info in copied throwing
80 statements and RESX statements are adjusted accordingly.
82 Cloning: (only in C++) We have one body for a con/de/structor, and
83 multiple function decls, each with a unique parameter list.
84 Duplicate the body, using the given splay tree; some parameters
85 will become constants (like 0 or 1).
87 Versioning: a function body is duplicated and the result is a new
88 function, rather than being inserted into the blocks of an existing
89 function as with inlining. Some parameters will become constants.
91 Parallelization: a region of a function is duplicated resulting in
92 a new function. Variables may be replaced with complex expressions
93 to enable shared variable semantics.
95 All of these will simultaneously look up any callgraph edges. If
96 we're going to inline the duplicated function body, and the given
97 function has some cloned callgraph nodes (one for each place this
98 function will be inlined), those callgraph edges will be duplicated.
99 If we're cloning the body, those callgraph edges will be
100 updated to point into the new body. (Note that the original
101 callgraph node and edge list will not be altered.)
103 See the CALL_EXPR handling case in copy_tree_body_r (). */
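/* A rough sketch of the inlining transform described above (illustrative
   only; the real work is done by the functions below): given

     int f (int x) { return x + 1; }    ...    y = f (a);

   the body of f is duplicated into the caller, the PARM_DECL x is
   remapped to a local VAR_DECL initialized from 'a', and the
   RETURN_EXPR becomes an assignment to a returned-value variable that
   then feeds the use of 'y'.  */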
105 /* To Do:
107 o In order to make inlining-on-trees work, we pessimized
108 function-local static constants. In particular, they are now
109 always output, even when not addressed. Fix this by treating
110 function-local static constants just like global static
111 constants; the back-end already knows not to output them if they
112 are not needed.
114 o Provide heuristics to clamp inlining of recursive template
115 calls? */
118 /* Weights that estimate_num_insns uses to estimate the size of the
119 produced code. */
121 eni_weights eni_size_weights;
123 /* Weights that estimate_num_insns uses to estimate the time necessary
124 to execute the produced code. */
126 eni_weights eni_time_weights;
128 /* Prototypes. */
130 static tree declare_return_variable (copy_body_data *, tree, tree,
131 basic_block);
132 static void remap_block (tree *, copy_body_data *);
133 static void copy_bind_expr (tree *, int *, copy_body_data *);
134 static void declare_inline_vars (tree, tree);
135 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
136 static void prepend_lexical_block (tree current_block, tree new_block);
137 static tree copy_result_decl_to_var (tree, copy_body_data *);
138 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
139 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
140 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
142 /* Insert a tree->tree mapping for ID. Although the name suggests
143 that the trees should be variables, it is used for more than that. */
145 void
146 insert_decl_map (copy_body_data *id, tree key, tree value)
148 id->decl_map->put (key, value);
150 /* Always insert an identity map as well. If we see this same new
151 node again, we won't want to duplicate it a second time. */
152 if (key != value && value)
153 id->decl_map->put (value, value);
156 /* If nonzero, we're remapping the contents of inlined debug
157 statements. If negative, an error has occurred, such as a
158 reference to a variable that isn't available in the inlined
159 context. */
160 static int processing_debug_stmt = 0;
162 /* Construct new SSA name for old NAME. ID is the inline context. */
164 static tree
165 remap_ssa_name (tree name, copy_body_data *id)
167 tree new_tree, var;
168 tree *n;
170 gcc_assert (TREE_CODE (name) == SSA_NAME);
172 n = id->decl_map->get (name);
173 if (n)
175 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
176 remove an unused LHS from a call statement. Such an LHS can however
177 still appear in debug statements, but its value is lost in this
178 function and we do not want to map it. */
179 if (id->killed_new_ssa_names
180 && id->killed_new_ssa_names->contains (*n))
182 gcc_assert (processing_debug_stmt);
183 processing_debug_stmt = -1;
184 return name;
187 return unshare_expr (*n);
190 if (processing_debug_stmt)
192 if (SSA_NAME_IS_DEFAULT_DEF (name)
193 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
194 && id->entry_bb == NULL
195 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
197 gimple *def_temp;
198 gimple_stmt_iterator gsi;
199 tree val = SSA_NAME_VAR (name);
201 n = id->decl_map->get (val);
202 if (n != NULL)
203 val = *n;
204 if (TREE_CODE (val) != PARM_DECL
205 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
207 processing_debug_stmt = -1;
208 return name;
210 n = id->decl_map->get (val);
211 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
212 return *n;
213 tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
214 /* FIXME: Is setting the mode really necessary? */
215 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
216 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
217 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
218 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
219 insert_decl_map (id, val, vexpr);
220 return vexpr;
223 processing_debug_stmt = -1;
224 return name;
227 /* Remap anonymous SSA names or SSA names of anonymous decls. */
228 var = SSA_NAME_VAR (name);
229 if (!var
230 || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 && VAR_P (var)
232 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 && DECL_ARTIFICIAL (var)
234 && DECL_IGNORED_P (var)
235 && !DECL_NAME (var)))
237 struct ptr_info_def *pi;
238 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239 if (!var && SSA_NAME_IDENTIFIER (name))
240 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241 insert_decl_map (id, name, new_tree);
242 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244 /* At least IPA points-to info can be directly transferred. */
245 if (id->src_cfun->gimple_df
246 && id->src_cfun->gimple_df->ipa_pta
247 && POINTER_TYPE_P (TREE_TYPE (name))
248 && (pi = SSA_NAME_PTR_INFO (name))
249 && !pi->pt.anything)
251 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
252 new_pi->pt = pi->pt;
254 /* So can range-info. */
255 if (!POINTER_TYPE_P (TREE_TYPE (name))
256 && SSA_NAME_RANGE_INFO (name))
257 duplicate_ssa_name_range_info (new_tree, name);
258 return new_tree;
261 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
262 in copy_bb. */
263 new_tree = remap_decl (var, id);
265 /* We might have substituted a constant or another SSA_NAME for
266 the variable.
268 Replace the SSA name representing the RESULT_DECL by the variable
269 during inlining: this saves us from needing to introduce a PHI node
270 in case the return value is only partly initialized. */
271 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
272 && (!SSA_NAME_VAR (name)
273 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
274 || !id->transform_return_to_modify))
276 struct ptr_info_def *pi;
277 new_tree = make_ssa_name (new_tree);
278 insert_decl_map (id, name, new_tree);
279 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
280 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
281 /* At least IPA points-to info can be directly transferred. */
282 if (id->src_cfun->gimple_df
283 && id->src_cfun->gimple_df->ipa_pta
284 && POINTER_TYPE_P (TREE_TYPE (name))
285 && (pi = SSA_NAME_PTR_INFO (name))
286 && !pi->pt.anything)
288 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
289 new_pi->pt = pi->pt;
291 /* So can range-info. */
292 if (!POINTER_TYPE_P (TREE_TYPE (name))
293 && SSA_NAME_RANGE_INFO (name))
294 duplicate_ssa_name_range_info (new_tree, name);
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
297 /* By inlining a function having an uninitialized variable, we might
298 extend its lifetime (the variable might get reused). This causes an
299 ICE in case we end up extending the lifetime of an SSA name across an
300 abnormal edge, and it also increases register pressure.
302 We simply initialize all uninitialized vars to 0, except for the
303 case where we are inlining into the very first BB. We can avoid
304 this for all BBs that are not inside strongly connected
305 regions of the CFG, but this is expensive to test. */
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple *init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
322 else
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
336 tree
337 remap_decl (tree decl, copy_body_data *id)
339 tree *n;
341 /* We only remap local variables in the current function. */
343 /* See if we have remapped this declaration. */
345 n = id->decl_map->get (decl);
347 if (!n && processing_debug_stmt)
349 processing_debug_stmt = -1;
350 return decl;
353 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
354 necessary DECLs have already been remapped and we do not want to duplicate
355 a decl coming from outside of the sequence we are copying. */
356 if (!n
357 && id->prevent_decl_creation_for_types
358 && id->remapping_type_depth > 0
359 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
360 return decl;
362 /* If we didn't already have an equivalent for this declaration, create one
363 now. */
364 if (!n)
366 /* Make a copy of the variable or label. */
367 tree t = id->copy_decl (decl, id);
369 /* Remember it, so that if we encounter this local entity again
370 we can reuse this copy. Do this early because remap_type may
371 need this decl for TYPE_STUB_DECL. */
372 insert_decl_map (id, decl, t);
374 if (!DECL_P (t) || t == decl)
375 return t;
377 /* Remap types, if necessary. */
378 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
379 if (TREE_CODE (t) == TYPE_DECL)
381 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
383 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
384 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
385 is not set on the TYPE_DECL, for example in LTO mode. */
386 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
388 tree x = build_variant_type_copy (TREE_TYPE (t));
389 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
390 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
391 DECL_ORIGINAL_TYPE (t) = x;
395 /* Remap sizes as necessary. */
396 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
397 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
399 /* If fields, do likewise for offset and qualifier. */
400 if (TREE_CODE (t) == FIELD_DECL)
402 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
403 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
404 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
407 return t;
410 if (id->do_not_unshare)
411 return *n;
412 else
413 return unshare_expr (*n);
416 static tree
417 remap_type_1 (tree type, copy_body_data *id)
419 tree new_tree, t;
421 /* We do need a copy. Build and register it now. If this is a pointer or
422 reference type, remap the designated type and make a new pointer or
423 reference type. */
424 if (TREE_CODE (type) == POINTER_TYPE)
426 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
427 TYPE_MODE (type),
428 TYPE_REF_CAN_ALIAS_ALL (type));
429 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
430 new_tree = build_type_attribute_qual_variant (new_tree,
431 TYPE_ATTRIBUTES (type),
432 TYPE_QUALS (type));
433 insert_decl_map (id, type, new_tree);
434 return new_tree;
436 else if (TREE_CODE (type) == REFERENCE_TYPE)
438 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
439 TYPE_MODE (type),
440 TYPE_REF_CAN_ALIAS_ALL (type));
441 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
442 new_tree = build_type_attribute_qual_variant (new_tree,
443 TYPE_ATTRIBUTES (type),
444 TYPE_QUALS (type));
445 insert_decl_map (id, type, new_tree);
446 return new_tree;
448 else
449 new_tree = copy_node (type);
451 insert_decl_map (id, type, new_tree);
453 /* This is a new type, not a copy of an old type. Need to reassociate
454 variants. We can handle everything except the main variant lazily. */
455 t = TYPE_MAIN_VARIANT (type);
456 if (type != t)
458 t = remap_type (t, id);
459 TYPE_MAIN_VARIANT (new_tree) = t;
460 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
461 TYPE_NEXT_VARIANT (t) = new_tree;
463 else
465 TYPE_MAIN_VARIANT (new_tree) = new_tree;
466 TYPE_NEXT_VARIANT (new_tree) = NULL;
469 if (TYPE_STUB_DECL (type))
470 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
472 /* Lazily create pointer and reference types. */
473 TYPE_POINTER_TO (new_tree) = NULL;
474 TYPE_REFERENCE_TO (new_tree) = NULL;
476 /* Copy all types that may contain references to local variables; be sure
477 to preserve sharing between the type and its main variant when possible. */
478 switch (TREE_CODE (new_tree))
480 case INTEGER_TYPE:
481 case REAL_TYPE:
482 case FIXED_POINT_TYPE:
483 case ENUMERAL_TYPE:
484 case BOOLEAN_TYPE:
485 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
487 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
488 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
490 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
493 else
495 t = TYPE_MIN_VALUE (new_tree);
496 if (t && TREE_CODE (t) != INTEGER_CST)
497 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
499 t = TYPE_MAX_VALUE (new_tree);
500 if (t && TREE_CODE (t) != INTEGER_CST)
501 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
503 return new_tree;
505 case FUNCTION_TYPE:
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
508 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
509 else
510 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
512 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
513 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
514 else
515 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
516 return new_tree;
518 case ARRAY_TYPE:
519 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
520 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
521 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
522 else
523 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
525 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
527 gcc_checking_assert (TYPE_DOMAIN (type)
528 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
529 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
531 else
533 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
534 /* For array bounds where we have decided not to copy over the bounds
535 variable which isn't used in OpenMP/OpenACC region, change them to
536 an uninitialized VAR_DECL temporary. */
537 if (id->adjust_array_error_bounds
538 && TYPE_DOMAIN (new_tree)
539 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
540 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
542 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
543 DECL_ATTRIBUTES (v)
544 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
545 DECL_ATTRIBUTES (v));
546 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
549 break;
551 case RECORD_TYPE:
552 case UNION_TYPE:
553 case QUAL_UNION_TYPE:
554 if (TYPE_MAIN_VARIANT (type) != type
555 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
556 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
557 else
559 tree f, nf = NULL;
561 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
563 t = remap_decl (f, id);
564 DECL_CONTEXT (t) = new_tree;
565 DECL_CHAIN (t) = nf;
566 nf = t;
568 TYPE_FIELDS (new_tree) = nreverse (nf);
570 break;
572 case OFFSET_TYPE:
573 default:
574 /* Shouldn't have been thought variable sized. */
575 gcc_unreachable ();
578 /* All variants of a type share the same size, so use the already remapped data. */
579 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
581 tree s = TYPE_SIZE (type);
582 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
583 tree su = TYPE_SIZE_UNIT (type);
584 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
585 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
586 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
587 || s == mvs);
588 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
589 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
590 || su == mvsu);
591 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
592 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
594 else
596 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
597 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
600 return new_tree;
603 /* Helper function for remap_type_2, called through walk_tree. */
605 static tree
606 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
608 copy_body_data *id = (copy_body_data *) data;
610 if (TYPE_P (*tp))
611 *walk_subtrees = 0;
613 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
614 return *tp;
616 return NULL_TREE;
619 /* Return true if TYPE needs to be remapped because remap_decl on any
620 needed embedded decl returns something other than that decl. */
622 static bool
623 remap_type_2 (tree type, copy_body_data *id)
625 tree t;
627 #define RETURN_TRUE_IF_VAR(T) \
628 do \
630 tree _t = (T); \
631 if (_t) \
633 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
634 return true; \
635 if (!TYPE_SIZES_GIMPLIFIED (type) \
636 && walk_tree (&_t, remap_type_3, id, NULL)) \
637 return true; \
640 while (0)
642 switch (TREE_CODE (type))
644 case POINTER_TYPE:
645 case REFERENCE_TYPE:
646 case FUNCTION_TYPE:
647 case METHOD_TYPE:
648 return remap_type_2 (TREE_TYPE (type), id);
650 case INTEGER_TYPE:
651 case REAL_TYPE:
652 case FIXED_POINT_TYPE:
653 case ENUMERAL_TYPE:
654 case BOOLEAN_TYPE:
655 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
656 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
657 return false;
659 case ARRAY_TYPE:
660 if (remap_type_2 (TREE_TYPE (type), id)
661 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
662 return true;
663 break;
665 case RECORD_TYPE:
666 case UNION_TYPE:
667 case QUAL_UNION_TYPE:
668 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
669 if (TREE_CODE (t) == FIELD_DECL)
671 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
672 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
673 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
674 if (TREE_CODE (type) == QUAL_UNION_TYPE)
675 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
677 break;
679 default:
680 return false;
683 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
684 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
685 return false;
686 #undef RETURN_TRUE_IF_VAR
689 tree
690 remap_type (tree type, copy_body_data *id)
692 tree *node;
693 tree tmp;
695 if (type == NULL)
696 return type;
698 /* See if we have remapped this type. */
699 node = id->decl_map->get (type);
700 if (node)
701 return *node;
703 /* The type only needs remapping if it's variably modified. */
704 if (! variably_modified_type_p (type, id->src_fn)
705 /* Don't remap if the copy_decl method doesn't always return a new
706 decl and returns the passed-in decl for all embedded decls. */
707 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
709 insert_decl_map (id, type, type);
710 return type;
713 id->remapping_type_depth++;
714 tmp = remap_type_1 (type, id);
715 id->remapping_type_depth--;
717 return tmp;
720 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
722 static bool
723 can_be_nonlocal (tree decl, copy_body_data *id)
725 /* We cannot duplicate function decls. */
726 if (TREE_CODE (decl) == FUNCTION_DECL)
727 return true;
729 /* Local static vars must be non-local or we get multiple declaration
730 problems. */
731 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
732 return true;
734 return false;
737 static tree
738 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
739 copy_body_data *id)
741 tree old_var;
742 tree new_decls = NULL_TREE;
744 /* Remap its variables. */
745 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
747 tree new_var;
749 if (can_be_nonlocal (old_var, id))
751 /* We need to add this variable to the local decls as otherwise
752 nothing else will do so. */
753 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
754 add_local_decl (cfun, old_var);
755 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
756 && !DECL_IGNORED_P (old_var)
757 && nonlocalized_list)
758 vec_safe_push (*nonlocalized_list, old_var);
759 continue;
762 /* Remap the variable. */
763 new_var = remap_decl (old_var, id);
765 /* If we didn't remap this variable, we can't mess with its
766 TREE_CHAIN. If we remapped this variable to the return slot, it's
767 already declared somewhere else, so don't declare it here. */
769 if (new_var == old_var || new_var == id->retvar)
771 else if (!new_var)
773 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
774 && !DECL_IGNORED_P (old_var)
775 && nonlocalized_list)
776 vec_safe_push (*nonlocalized_list, old_var);
778 else
780 gcc_assert (DECL_P (new_var));
781 DECL_CHAIN (new_var) = new_decls;
782 new_decls = new_var;
784 /* Also copy value-expressions. */
785 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
787 tree tem = DECL_VALUE_EXPR (new_var);
788 bool old_regimplify = id->regimplify;
789 id->remapping_type_depth++;
790 walk_tree (&tem, copy_tree_body_r, id, NULL);
791 id->remapping_type_depth--;
792 id->regimplify = old_regimplify;
793 SET_DECL_VALUE_EXPR (new_var, tem);
798 return nreverse (new_decls);
801 /* Copy the BLOCK to contain remapped versions of the variables
802 therein. And hook the new block into the block-tree. */
804 static void
805 remap_block (tree *block, copy_body_data *id)
807 tree old_block;
808 tree new_block;
810 /* Make the new block. */
811 old_block = *block;
812 new_block = make_node (BLOCK);
813 TREE_USED (new_block) = TREE_USED (old_block);
814 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
815 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
816 BLOCK_NONLOCALIZED_VARS (new_block)
817 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
818 *block = new_block;
820 /* Remap its variables. */
821 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
822 &BLOCK_NONLOCALIZED_VARS (new_block),
823 id);
825 /* Remember the remapped block. */
826 insert_decl_map (id, old_block, new_block);
829 /* Copy the whole block tree and root it in id->block. */
831 static tree
832 remap_blocks (tree block, copy_body_data *id)
834 tree t;
835 tree new_tree = block;
837 if (!block)
838 return NULL;
840 remap_block (&new_tree, id);
841 gcc_assert (new_tree != block);
842 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
843 prepend_lexical_block (new_tree, remap_blocks (t, id));
844 /* Blocks are in arbitrary order, but make things slightly prettier and do
845 not swap order when producing a copy. */
846 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
847 return new_tree;
850 /* Remap the block tree rooted at BLOCK to nothing. */
852 static void
853 remap_blocks_to_null (tree block, copy_body_data *id)
855 tree t;
856 insert_decl_map (id, block, NULL_TREE);
857 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
858 remap_blocks_to_null (t, id);
861 /* Remap the location info pointed to by LOCUS. */
863 static location_t
864 remap_location (location_t locus, copy_body_data *id)
866 if (LOCATION_BLOCK (locus))
868 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
869 gcc_assert (n);
870 if (*n)
871 return set_block (locus, *n);
874 locus = LOCATION_LOCUS (locus);
876 if (locus != UNKNOWN_LOCATION && id->block)
877 return set_block (locus, id->block);
879 return locus;
882 static void
883 copy_statement_list (tree *tp)
885 tree_stmt_iterator oi, ni;
886 tree new_tree;
888 new_tree = alloc_stmt_list ();
889 ni = tsi_start (new_tree);
890 oi = tsi_start (*tp);
891 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
892 *tp = new_tree;
894 for (; !tsi_end_p (oi); tsi_next (&oi))
896 tree stmt = tsi_stmt (oi);
897 if (TREE_CODE (stmt) == STATEMENT_LIST)
898 /* This copy is not redundant; tsi_link_after will smash this
899 STATEMENT_LIST into the end of the one we're building, and we
900 don't want to do that with the original. */
901 copy_statement_list (&stmt);
902 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
906 static void
907 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
909 tree block = BIND_EXPR_BLOCK (*tp);
910 /* Copy (and replace) the statement. */
911 copy_tree_r (tp, walk_subtrees, NULL);
912 if (block)
914 remap_block (&block, id);
915 BIND_EXPR_BLOCK (*tp) = block;
918 if (BIND_EXPR_VARS (*tp))
919 /* This will remap a lot of the same decls again, but this should be
920 harmless. */
921 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
925 /* Create a new gimple_seq by remapping all the statements in BODY
926 using the inlining information in ID. */
928 static gimple_seq
929 remap_gimple_seq (gimple_seq body, copy_body_data *id)
931 gimple_stmt_iterator si;
932 gimple_seq new_body = NULL;
934 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
936 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
937 gimple_seq_add_seq (&new_body, new_stmts);
940 return new_body;
944 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
945 block using the mapping information in ID. */
947 static gimple *
948 copy_gimple_bind (gbind *stmt, copy_body_data *id)
950 gimple *new_bind;
951 tree new_block, new_vars;
952 gimple_seq body, new_body;
954 /* Copy the statement. Note that we purposely don't use copy_stmt
955 here because we need to remap statements as we copy. */
956 body = gimple_bind_body (stmt);
957 new_body = remap_gimple_seq (body, id);
959 new_block = gimple_bind_block (stmt);
960 if (new_block)
961 remap_block (&new_block, id);
963 /* This will remap a lot of the same decls again, but this should be
964 harmless. */
965 new_vars = gimple_bind_vars (stmt);
966 if (new_vars)
967 new_vars = remap_decls (new_vars, NULL, id);
969 new_bind = gimple_build_bind (new_vars, new_body, new_block);
971 return new_bind;
974 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
976 static bool
977 is_parm (tree decl)
979 if (TREE_CODE (decl) == SSA_NAME)
981 decl = SSA_NAME_VAR (decl);
982 if (!decl)
983 return false;
986 return (TREE_CODE (decl) == PARM_DECL);
989 /* Remap the dependence CLIQUE from the source to the destination function
990 as specified in ID. */
992 static unsigned short
993 remap_dependence_clique (copy_body_data *id, unsigned short clique)
995 if (clique == 0 || processing_debug_stmt)
996 return 0;
997 if (!id->dependence_map)
998 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
999 bool existed;
1000 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1001 if (!existed)
1003 /* Clique 1 is reserved for local ones set by PTA. */
1004 if (cfun->last_clique == 0)
1005 cfun->last_clique = 1;
1006 newc = get_new_clique (cfun);
1008 return newc;
1011 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1012 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1013 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1014 recursing into the children nodes of *TP. */
1016 static tree
1017 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1019 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1020 copy_body_data *id = (copy_body_data *) wi_p->info;
1021 tree fn = id->src_fn;
1023 /* For recursive invocations this is no longer the LHS itself. */
1024 bool is_lhs = wi_p->is_lhs;
1025 wi_p->is_lhs = false;
1027 if (TREE_CODE (*tp) == SSA_NAME)
1029 *tp = remap_ssa_name (*tp, id);
1030 *walk_subtrees = 0;
1031 if (is_lhs)
1032 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1033 return NULL;
1035 else if (auto_var_in_fn_p (*tp, fn))
1037 /* Local variables and labels need to be replaced by equivalent
1038 variables. We don't want to copy static variables; there's
1039 only one of those, no matter how many times we inline the
1040 containing function. Similarly for globals from an outer
1041 function. */
1042 tree new_decl;
1044 /* Remap the declaration. */
1045 new_decl = remap_decl (*tp, id);
1046 gcc_assert (new_decl);
1047 /* Replace this variable with the copy. */
1048 STRIP_TYPE_NOPS (new_decl);
1049 /* ??? The C++ frontend uses void * pointer zero to initialize
1050 any other type. This confuses the middle-end type verification.
1051 As cloned bodies do not go through gimplification again the fixup
1052 there doesn't trigger. */
1053 if (TREE_CODE (new_decl) == INTEGER_CST
1054 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1055 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1056 *tp = new_decl;
1057 *walk_subtrees = 0;
1059 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1060 gcc_unreachable ();
1061 else if (TREE_CODE (*tp) == SAVE_EXPR)
1062 gcc_unreachable ();
1063 else if (TREE_CODE (*tp) == LABEL_DECL
1064 && (!DECL_CONTEXT (*tp)
1065 || decl_function_context (*tp) == id->src_fn))
1066 /* These may need to be remapped for EH handling. */
1067 *tp = remap_decl (*tp, id);
1068 else if (TREE_CODE (*tp) == FIELD_DECL)
1070 /* If the enclosing record type is variably_modified_type_p, the field
1071 has already been remapped. Otherwise, it need not be. */
1072 tree *n = id->decl_map->get (*tp);
1073 if (n)
1074 *tp = *n;
1075 *walk_subtrees = 0;
1077 else if (TYPE_P (*tp))
1078 /* Types may need remapping as well. */
1079 *tp = remap_type (*tp, id);
1080 else if (CONSTANT_CLASS_P (*tp))
1082 /* If this is a constant, we have to copy the node iff the type
1083 will be remapped. copy_tree_r will not copy a constant. */
1084 tree new_type = remap_type (TREE_TYPE (*tp), id);
1086 if (new_type == TREE_TYPE (*tp))
1087 *walk_subtrees = 0;
1089 else if (TREE_CODE (*tp) == INTEGER_CST)
1090 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1091 else
1093 *tp = copy_node (*tp);
1094 TREE_TYPE (*tp) = new_type;
1097 else
1099 /* Otherwise, just copy the node. Note that copy_tree_r already
1100 knows not to copy VAR_DECLs, etc., so this is safe. */
1102 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1104 /* We need to re-canonicalize MEM_REFs from inline substitutions
1105 that can happen when a pointer argument is an ADDR_EXPR.
1106 Recurse here manually to allow that. */
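/* E.g. once '&a' has been substituted for the remapped pointer, the
   fold_build2 call below can re-canonicalize the reference so that it
   is based directly on '&a' rather than on the now-dead pointer
   (illustrative sketch only). */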
1107 tree ptr = TREE_OPERAND (*tp, 0);
1108 tree type = remap_type (TREE_TYPE (*tp), id);
1109 tree old = *tp;
1110 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1111 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1112 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1113 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1114 copy_warning (*tp, old);
1115 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1117 MR_DEPENDENCE_CLIQUE (*tp)
1118 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1119 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1121 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1122 remapped a parameter as the property might be valid only
1123 for the parameter itself. */
1124 if (TREE_THIS_NOTRAP (old)
1125 && (!is_parm (TREE_OPERAND (old, 0))
1126 || (!id->transform_parameter && is_parm (ptr))))
1127 TREE_THIS_NOTRAP (*tp) = 1;
1128 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1129 *walk_subtrees = 0;
1130 return NULL;
1133 /* Here is the "usual case". Copy this tree node, and then
1134 tweak some special cases. */
1135 copy_tree_r (tp, walk_subtrees, NULL);
1137 if (TREE_CODE (*tp) != OMP_CLAUSE)
1138 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1140 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1142 /* The copied TARGET_EXPR has never been expanded, even if the
1143 original node was expanded already. */
1144 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1145 TREE_OPERAND (*tp, 3) = NULL_TREE;
1147 else if (TREE_CODE (*tp) == ADDR_EXPR)
1149 /* Variable substitution need not be simple. In particular,
1150 the MEM_REF substitution above. Make sure that
1151 TREE_CONSTANT and friends are up-to-date. */
1152 int invariant = is_gimple_min_invariant (*tp);
1153 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1154 recompute_tree_invariant_for_addr_expr (*tp);
1156 /* If this used to be invariant, but is not any longer,
1157 then regimplification is probably needed. */
1158 if (invariant && !is_gimple_min_invariant (*tp))
1159 id->regimplify = true;
1161 *walk_subtrees = 0;
1165 /* Update the TREE_BLOCK for the cloned expr. */
1166 if (EXPR_P (*tp))
1168 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1169 tree old_block = TREE_BLOCK (*tp);
1170 if (old_block)
1172 tree *n;
1173 n = id->decl_map->get (TREE_BLOCK (*tp));
1174 if (n)
1175 new_block = *n;
1177 TREE_SET_BLOCK (*tp, new_block);
1180 /* Keep iterating. */
1181 return NULL_TREE;
1185 /* Called from copy_body_id via walk_tree. DATA is really a
1186 `copy_body_data *'. */
1188 tree
1189 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1191 copy_body_data *id = (copy_body_data *) data;
1192 tree fn = id->src_fn;
1193 tree new_block;
1195 /* Begin by recognizing trees that we'll completely rewrite for the
1196 inlining context. Our output for these trees is completely
1197 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1198 into an edge). Further down, we'll handle trees that get
1199 duplicated and/or tweaked. */
1201 /* When requested, RETURN_EXPRs should be transformed to just the
1202 contained MODIFY_EXPR. The branch semantics of the return will
1203 be handled elsewhere by manipulating the CFG rather than a statement. */
1204 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1206 tree assignment = TREE_OPERAND (*tp, 0);
1208 /* If we're returning something, just turn that into an
1209 assignment into the equivalent of the original RESULT_DECL.
1210 If the "assignment" is just the result decl, the result
1211 decl has already been set (e.g. a recent "foo (&result_decl,
1212 ...)"); just toss the entire RETURN_EXPR. */
1213 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1215 /* Replace the RETURN_EXPR with (a copy of) the
1216 MODIFY_EXPR hanging underneath. */
1217 *tp = copy_node (assignment);
1219 else /* Else the RETURN_EXPR returns no value. */
1221 *tp = NULL;
1222 return (tree) (void *)1;
1225 else if (TREE_CODE (*tp) == SSA_NAME)
1227 *tp = remap_ssa_name (*tp, id);
1228 *walk_subtrees = 0;
1229 return NULL;
1232 /* Local variables and labels need to be replaced by equivalent
1233 variables. We don't want to copy static variables; there's only
1234 one of those, no matter how many times we inline the containing
1235 function. Similarly for globals from an outer function. */
1236 else if (auto_var_in_fn_p (*tp, fn))
1238 tree new_decl;
1240 /* Remap the declaration. */
1241 new_decl = remap_decl (*tp, id);
1242 gcc_assert (new_decl);
1243 /* Replace this variable with the copy. */
1244 STRIP_TYPE_NOPS (new_decl);
1245 *tp = new_decl;
1246 *walk_subtrees = 0;
1248 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1249 copy_statement_list (tp);
1250 else if (TREE_CODE (*tp) == SAVE_EXPR
1251 || TREE_CODE (*tp) == TARGET_EXPR)
1252 remap_save_expr (tp, id->decl_map, walk_subtrees);
1253 else if (TREE_CODE (*tp) == LABEL_DECL
1254 && (! DECL_CONTEXT (*tp)
1255 || decl_function_context (*tp) == id->src_fn))
1256 /* These may need to be remapped for EH handling. */
1257 *tp = remap_decl (*tp, id);
1258 else if (TREE_CODE (*tp) == BIND_EXPR)
1259 copy_bind_expr (tp, walk_subtrees, id);
1260 /* Types may need remapping as well. */
1261 else if (TYPE_P (*tp))
1262 *tp = remap_type (*tp, id);
1264 /* If this is a constant, we have to copy the node iff the type will be
1265 remapped. copy_tree_r will not copy a constant. */
1266 else if (CONSTANT_CLASS_P (*tp))
1268 tree new_type = remap_type (TREE_TYPE (*tp), id);
1270 if (new_type == TREE_TYPE (*tp))
1271 *walk_subtrees = 0;
1273 else if (TREE_CODE (*tp) == INTEGER_CST)
1274 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1275 else
1277 *tp = copy_node (*tp);
1278 TREE_TYPE (*tp) = new_type;
1282 /* Otherwise, just copy the node. Note that copy_tree_r already
1283 knows not to copy VAR_DECLs, etc., so this is safe. */
1284 else
1286 /* Here we handle trees that are not completely rewritten.
1287 First we detect some inlining-induced bogosities for
1288 discarding. */
1289 if (TREE_CODE (*tp) == MODIFY_EXPR
1290 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1291 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1293 /* Some assignments VAR = VAR; don't generate any rtl code
1294 and thus don't count as variable modification. Avoid
1295 keeping bogosities like 0 = 0. */
1296 tree decl = TREE_OPERAND (*tp, 0), value;
1297 tree *n;
1299 n = id->decl_map->get (decl);
1300 if (n)
1302 value = *n;
1303 STRIP_TYPE_NOPS (value);
1304 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1306 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1307 return copy_tree_body_r (tp, walk_subtrees, data);
1311 else if (INDIRECT_REF_P (*tp))
1313 /* Get rid of *& from inline substitutions that can happen when a
1314 pointer argument is an ADDR_EXPR. */
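/* For example, when the caller passes '&a' for a parameter 'p', a use
   of '*p' in the inlined body would otherwise become '*&a'; folding it
   back to plain 'a' below keeps the IL simple (illustrative example only). */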
1315 tree decl = TREE_OPERAND (*tp, 0);
1316 tree *n = id->decl_map->get (decl);
1317 if (n)
1319 /* If we happen to get an ADDR_EXPR in n->value, strip
1320 it manually here as we'll eventually get ADDR_EXPRs
1321 which lie about their types pointed to. In this case
1322 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1323 but we absolutely rely on that. As fold_indirect_ref
1324 does other useful transformations, try that first, though. */
1325 tree type = TREE_TYPE (*tp);
1326 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1327 tree old = *tp;
1328 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1329 if (! *tp)
1331 type = remap_type (type, id);
1332 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1335 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1336 /* ??? We should either assert here or build
1337 a VIEW_CONVERT_EXPR instead of blindly leaking
1338 incompatible types to our IL. */
1339 if (! *tp)
1340 *tp = TREE_OPERAND (ptr, 0);
1342 else
1344 *tp = build1 (INDIRECT_REF, type, ptr);
1345 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1346 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1347 TREE_READONLY (*tp) = TREE_READONLY (old);
1348 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1349 have remapped a parameter as the property might be
1350 valid only for the parameter itself. */
1351 if (TREE_THIS_NOTRAP (old)
1352 && (!is_parm (TREE_OPERAND (old, 0))
1353 || (!id->transform_parameter && is_parm (ptr))))
1354 TREE_THIS_NOTRAP (*tp) = 1;
1357 *walk_subtrees = 0;
1358 return NULL;
1361 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1363 /* We need to re-canonicalize MEM_REFs from inline substitutions
1364 that can happen when a pointer argument is an ADDR_EXPR.
1365 Recurse here manually to allow that. */
1366 tree ptr = TREE_OPERAND (*tp, 0);
1367 tree type = remap_type (TREE_TYPE (*tp), id);
1368 tree old = *tp;
1369 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1370 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1371 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1372 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1373 copy_warning (*tp, old);
1374 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1376 MR_DEPENDENCE_CLIQUE (*tp)
1377 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1378 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1380 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1381 remapped a parameter as the property might be valid only
1382 for the parameter itself. */
1383 if (TREE_THIS_NOTRAP (old)
1384 && (!is_parm (TREE_OPERAND (old, 0))
1385 || (!id->transform_parameter && is_parm (ptr))))
1386 TREE_THIS_NOTRAP (*tp) = 1;
1387 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1388 *walk_subtrees = 0;
1389 return NULL;
1392 /* Here is the "usual case". Copy this tree node, and then
1393 tweak some special cases. */
1394 copy_tree_r (tp, walk_subtrees, NULL);
1396 /* If EXPR has a block defined, map it to the newly constructed block.
1397 When inlining we want EXPRs without a block to appear in the block
1398 of the function call if we are not remapping a type. */
1399 if (EXPR_P (*tp))
1401 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1402 if (TREE_BLOCK (*tp))
1404 tree *n;
1405 n = id->decl_map->get (TREE_BLOCK (*tp));
1406 if (n)
1407 new_block = *n;
1409 TREE_SET_BLOCK (*tp, new_block);
1412 if (TREE_CODE (*tp) != OMP_CLAUSE)
1413 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1415 /* The copied TARGET_EXPR has never been expanded, even if the
1416 original node was expanded already. */
1417 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1419 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1420 TREE_OPERAND (*tp, 3) = NULL_TREE;
1423 /* Variable substitution need not be simple. In particular, the
1424 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1425 and friends are up-to-date. */
1426 else if (TREE_CODE (*tp) == ADDR_EXPR)
1428 int invariant = is_gimple_min_invariant (*tp);
1429 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1431 /* Handle the case where we substituted an INDIRECT_REF
1432 into the operand of the ADDR_EXPR. */
1433 if (INDIRECT_REF_P (TREE_OPERAND (*tp, 0))
1434 && !id->do_not_fold)
1436 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1437 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1438 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1439 *tp = t;
1441 else
1442 recompute_tree_invariant_for_addr_expr (*tp);
1444 /* If this used to be invariant, but is not any longer,
1445 then regimplification is probably needed. */
1446 if (invariant && !is_gimple_min_invariant (*tp))
1447 id->regimplify = true;
1449 *walk_subtrees = 0;
1451 else if (TREE_CODE (*tp) == OMP_CLAUSE
1452 && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
1453 || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
1455 tree t = OMP_CLAUSE_DECL (*tp);
1456 if (t
1457 && TREE_CODE (t) == TREE_LIST
1458 && TREE_PURPOSE (t)
1459 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
1461 *walk_subtrees = 0;
1462 OMP_CLAUSE_DECL (*tp) = copy_node (t);
1463 t = OMP_CLAUSE_DECL (*tp);
1464 TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
1465 for (int i = 0; i <= 4; i++)
1466 walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
1467 copy_tree_body_r, id, NULL);
1468 if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
1469 remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
1470 walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
1475 /* Keep iterating. */
1476 return NULL_TREE;
1479 /* Helper for remap_gimple_stmt. Given an EH region number for the
1480 source function, map that to the duplicate EH region number in
1481 the destination function. */
1483 static int
1484 remap_eh_region_nr (int old_nr, copy_body_data *id)
1486 eh_region old_r, new_r;
1488 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1489 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1491 return new_r->index;
1494 /* Similar, but operate on INTEGER_CSTs. */
1496 static tree
1497 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1499 int old_nr, new_nr;
1501 old_nr = tree_to_shwi (old_t_nr);
1502 new_nr = remap_eh_region_nr (old_nr, id);
1504 return build_int_cst (integer_type_node, new_nr);
1507 /* Helper for copy_bb. Remap statement STMT using the inlining
1508 information in ID. Return the new statement copy. */
1510 static gimple_seq
1511 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1513 gimple *copy = NULL;
1514 struct walk_stmt_info wi;
1515 bool skip_first = false;
1516 gimple_seq stmts = NULL;
1518 if (is_gimple_debug (stmt)
1519 && (gimple_debug_nonbind_marker_p (stmt)
1520 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1521 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1522 return NULL;
1524 if (!is_gimple_debug (stmt)
1525 && id->param_body_adjs
1526 && id->param_body_adjs->m_dead_stmts.contains (stmt))
1528 tree *dval = id->param_body_adjs->m_dead_stmt_debug_equiv.get (stmt);
1529 if (!dval)
1530 return NULL;
1532 gcc_assert (is_gimple_assign (stmt));
1533 tree lhs = gimple_assign_lhs (stmt);
1534 tree *dvar = id->param_body_adjs->m_dead_ssa_debug_equiv.get (lhs);
1535 gdebug *bind = gimple_build_debug_bind (*dvar, *dval, stmt);
1536 if (id->reset_location)
1537 gimple_set_location (bind, input_location);
1538 id->debug_stmts.safe_push (bind);
1539 gimple_seq_add_stmt_without_update (&stmts, bind);
1540 return stmts;
1543 /* Begin by recognizing trees that we'll completely rewrite for the
1544 inlining context. Our output for these trees is completely
1545 different from our input (e.g. RETURN_EXPR is deleted and morphs
1546 into an edge). Further down, we'll handle trees that get
1547 duplicated and/or tweaked. */
1549 /* When requested, GIMPLE_RETURN should be transformed to just the
1550 contained GIMPLE_ASSIGN. The branch semantics of the return will
1551 be handled elsewhere by manipulating the CFG rather than the
1552 statement. */
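/* E.g. 'return val_1;' in the inlined body becomes an assignment
   '<retvar> = val_1;' here (illustrative sketch; see the assignment
   built below). */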
1553 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1555 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1557 /* If we're returning something, just turn that into an
1558 assignment to the equivalent of the original RESULT_DECL.
1559 If RETVAL is just the result decl, the result decl has
1560 already been set (e.g. a recent "foo (&result_decl, ...)");
1561 just toss the entire GIMPLE_RETURN. Likewise for when the
1562 call doesn't want the return value. */
1563 if (retval
1564 && (TREE_CODE (retval) != RESULT_DECL
1565 && (!id->call_stmt
1566 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1567 && (TREE_CODE (retval) != SSA_NAME
1568 || ! SSA_NAME_VAR (retval)
1569 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1571 copy = gimple_build_assign (id->do_not_unshare
1572 ? id->retvar : unshare_expr (id->retvar),
1573 retval);
1574 /* id->retvar is already substituted. Skip it on later remapping. */
1575 skip_first = true;
1577 else
1578 return NULL;
1580 else if (gimple_has_substatements (stmt))
1582 gimple_seq s1, s2;
1584 /* When cloning bodies from the C++ front end, we will be handed bodies
1585 in High GIMPLE form. Handle here all the High GIMPLE statements that
1586 have embedded statements. */
1587 switch (gimple_code (stmt))
1589 case GIMPLE_BIND:
1590 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1591 break;
1593 case GIMPLE_CATCH:
1595 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1596 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1597 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1599 break;
1601 case GIMPLE_EH_FILTER:
1602 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1603 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1604 break;
1606 case GIMPLE_TRY:
1607 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1608 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1609 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1610 break;
1612 case GIMPLE_WITH_CLEANUP_EXPR:
1613 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1614 copy = gimple_build_wce (s1);
1615 break;
1617 case GIMPLE_OMP_PARALLEL:
1619 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1620 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1621 copy = gimple_build_omp_parallel
1622 (s1,
1623 gimple_omp_parallel_clauses (omp_par_stmt),
1624 gimple_omp_parallel_child_fn (omp_par_stmt),
1625 gimple_omp_parallel_data_arg (omp_par_stmt));
1627 break;
1629 case GIMPLE_OMP_TASK:
1630 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1631 copy = gimple_build_omp_task
1632 (s1,
1633 gimple_omp_task_clauses (stmt),
1634 gimple_omp_task_child_fn (stmt),
1635 gimple_omp_task_data_arg (stmt),
1636 gimple_omp_task_copy_fn (stmt),
1637 gimple_omp_task_arg_size (stmt),
1638 gimple_omp_task_arg_align (stmt));
1639 break;
1641 case GIMPLE_OMP_FOR:
1642 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1643 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1644 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1645 gimple_omp_for_clauses (stmt),
1646 gimple_omp_for_collapse (stmt), s2);
1648 size_t i;
1649 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1651 gimple_omp_for_set_index (copy, i,
1652 gimple_omp_for_index (stmt, i));
1653 gimple_omp_for_set_initial (copy, i,
1654 gimple_omp_for_initial (stmt, i));
1655 gimple_omp_for_set_final (copy, i,
1656 gimple_omp_for_final (stmt, i));
1657 gimple_omp_for_set_incr (copy, i,
1658 gimple_omp_for_incr (stmt, i));
1659 gimple_omp_for_set_cond (copy, i,
1660 gimple_omp_for_cond (stmt, i));
1663 break;
1665 case GIMPLE_OMP_MASTER:
1666 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1667 copy = gimple_build_omp_master (s1);
1668 break;
1670 case GIMPLE_OMP_MASKED:
1671 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1672 copy = gimple_build_omp_masked
1673 (s1, gimple_omp_masked_clauses (stmt));
1674 break;
1676 case GIMPLE_OMP_SCOPE:
1677 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1678 copy = gimple_build_omp_scope
1679 (s1, gimple_omp_scope_clauses (stmt));
1680 break;
1682 case GIMPLE_OMP_TASKGROUP:
1683 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1684 copy = gimple_build_omp_taskgroup
1685 (s1, gimple_omp_taskgroup_clauses (stmt));
1686 break;
1688 case GIMPLE_OMP_ORDERED:
1689 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1690 copy = gimple_build_omp_ordered
1691 (s1,
1692 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1693 break;
1695 case GIMPLE_OMP_SCAN:
1696 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1697 copy = gimple_build_omp_scan
1698 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1699 break;
1701 case GIMPLE_OMP_SECTION:
1702 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1703 copy = gimple_build_omp_section (s1);
1704 break;
1706 case GIMPLE_OMP_SECTIONS:
1707 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1708 copy = gimple_build_omp_sections
1709 (s1, gimple_omp_sections_clauses (stmt));
1710 break;
1712 case GIMPLE_OMP_STRUCTURED_BLOCK:
1713 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1714 copy = gimple_build_omp_structured_block (s1);
1715 break;
1717 case GIMPLE_OMP_SINGLE:
1718 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1719 copy = gimple_build_omp_single
1720 (s1, gimple_omp_single_clauses (stmt));
1721 break;
1723 case GIMPLE_OMP_TARGET:
1724 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1725 copy = gimple_build_omp_target
1726 (s1, gimple_omp_target_kind (stmt),
1727 gimple_omp_target_clauses (stmt));
1728 break;
1730 case GIMPLE_OMP_TEAMS:
1731 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1732 copy = gimple_build_omp_teams
1733 (s1, gimple_omp_teams_clauses (stmt));
1734 break;
1736 case GIMPLE_OMP_CRITICAL:
1737 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1738 copy = gimple_build_omp_critical (s1,
1739 gimple_omp_critical_name
1740 (as_a <gomp_critical *> (stmt)),
1741 gimple_omp_critical_clauses
1742 (as_a <gomp_critical *> (stmt)));
1743 break;
1745 case GIMPLE_ASSUME:
1746 s1 = remap_gimple_seq (gimple_assume_body (stmt), id);
1747 copy = gimple_build_assume (gimple_assume_guard (stmt), s1);
1748 break;
1750 case GIMPLE_TRANSACTION:
1752 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1753 gtransaction *new_trans_stmt;
1754 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1755 id);
1756 copy = new_trans_stmt = gimple_build_transaction (s1);
1757 gimple_transaction_set_subcode (new_trans_stmt,
1758 gimple_transaction_subcode (old_trans_stmt));
1759 gimple_transaction_set_label_norm (new_trans_stmt,
1760 gimple_transaction_label_norm (old_trans_stmt));
1761 gimple_transaction_set_label_uninst (new_trans_stmt,
1762 gimple_transaction_label_uninst (old_trans_stmt));
1763 gimple_transaction_set_label_over (new_trans_stmt,
1764 gimple_transaction_label_over (old_trans_stmt));
1766 break;
1768 default:
1769 gcc_unreachable ();
1772 else
1774 if (gimple_assign_single_p (stmt)
1775 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1776 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1778 /* Here we handle statements that are not completely rewritten.
1779 First we detect some inlining-induced bogosities for
1780 discarding. */
1782 /* Some assignments VAR = VAR; don't generate any rtl code
1783 and thus don't count as variable modification. Avoid
1784 keeping bogosities like 0 = 0. */
1785 tree decl = gimple_assign_lhs (stmt), value;
1786 tree *n;
1788 n = id->decl_map->get (decl);
1789 if (n)
1791 value = *n;
1792 STRIP_TYPE_NOPS (value);
1793 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1794 return NULL;
1798 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1799 in a block that we aren't copying during tree_function_versioning,
1800 just drop the clobber stmt. */
1801 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1803 tree lhs = gimple_assign_lhs (stmt);
1804 if (TREE_CODE (lhs) == MEM_REF
1805 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1807 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1808 if (gimple_bb (def_stmt)
1809 && !bitmap_bit_p (id->blocks_to_copy,
1810 gimple_bb (def_stmt)->index))
1811 return NULL;
1815 /* We do not allow CLOBBERs of handled components. In case the
1816 returned value is stored via such a handled component, remove
1817 the clobber so the stmt verifier is happy. */
1818 if (gimple_clobber_p (stmt)
1819 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1821 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1822 if (!DECL_P (remapped)
1823 && TREE_CODE (remapped) != MEM_REF)
1824 return NULL;
1827 if (gimple_debug_bind_p (stmt))
1829 tree var = gimple_debug_bind_get_var (stmt);
1830 tree value = gimple_debug_bind_get_value (stmt);
1831 if (id->param_body_adjs
1832 && id->param_body_adjs->m_dead_stmts.contains (stmt))
1834 value = unshare_expr_without_location (value);
1835 id->param_body_adjs->remap_with_debug_expressions (&value);
1838 gdebug *copy = gimple_build_debug_bind (var, value, stmt);
1839 if (id->reset_location)
1840 gimple_set_location (copy, input_location);
1841 id->debug_stmts.safe_push (copy);
1842 gimple_seq_add_stmt_without_update (&stmts, copy);
1843 return stmts;
1845 if (gimple_debug_source_bind_p (stmt))
1847 gdebug *copy = gimple_build_debug_source_bind
1848 (gimple_debug_source_bind_get_var (stmt),
1849 gimple_debug_source_bind_get_value (stmt),
1850 stmt);
1851 if (id->reset_location)
1852 gimple_set_location (copy, input_location);
1853 id->debug_stmts.safe_push (copy);
1854 gimple_seq_add_stmt_without_update (&stmts, copy);
1855 return stmts;
1857 if (gimple_debug_nonbind_marker_p (stmt))
1859 /* If the inlined function has too many debug markers,
1860 don't copy them. */
1861 if (id->src_cfun->debug_marker_count
1862 > param_max_debug_marker_count
1863 || id->reset_location)
1864 return stmts;
1866 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1867 id->debug_stmts.safe_push (copy);
1868 gimple_seq_add_stmt_without_update (&stmts, copy);
1869 return stmts;
1872 /* Create a new deep copy of the statement. */
1873 copy = gimple_copy (stmt);
1875 /* Clear flags that need revisiting. */
1876 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1878 if (gimple_call_tail_p (call_stmt))
1879 gimple_call_set_tail (call_stmt, false);
1880 if (gimple_call_from_thunk_p (call_stmt))
1881 gimple_call_set_from_thunk (call_stmt, false);
1882 if (gimple_call_internal_p (call_stmt))
1883 switch (gimple_call_internal_fn (call_stmt))
1885 case IFN_GOMP_SIMD_LANE:
1886 case IFN_GOMP_SIMD_VF:
1887 case IFN_GOMP_SIMD_LAST_LANE:
1888 case IFN_GOMP_SIMD_ORDERED_START:
1889 case IFN_GOMP_SIMD_ORDERED_END:
1890 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1891 break;
1892 default:
1893 break;
1897 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1898 RESX and EH_DISPATCH. */
1899 if (id->eh_map)
1900 switch (gimple_code (copy))
1902 case GIMPLE_CALL:
1904 tree r, fndecl = gimple_call_fndecl (copy);
1905 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1906 switch (DECL_FUNCTION_CODE (fndecl))
1908 case BUILT_IN_EH_COPY_VALUES:
1909 r = gimple_call_arg (copy, 1);
1910 r = remap_eh_region_tree_nr (r, id);
1911 gimple_call_set_arg (copy, 1, r);
1912 /* FALLTHRU */
1914 case BUILT_IN_EH_POINTER:
1915 case BUILT_IN_EH_FILTER:
1916 r = gimple_call_arg (copy, 0);
1917 r = remap_eh_region_tree_nr (r, id);
1918 gimple_call_set_arg (copy, 0, r);
1919 break;
1921 default:
1922 break;
1925 /* Reset alias info if we didn't apply measures to
1926 keep it valid over inlining by setting DECL_PT_UID. */
1927 if (!id->src_cfun->gimple_df
1928 || !id->src_cfun->gimple_df->ipa_pta)
1929 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1931 break;
1933 case GIMPLE_RESX:
1935 gresx *resx_stmt = as_a <gresx *> (copy);
1936 int r = gimple_resx_region (resx_stmt);
1937 r = remap_eh_region_nr (r, id);
1938 gimple_resx_set_region (resx_stmt, r);
1940 break;
1942 case GIMPLE_EH_DISPATCH:
1944 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1945 int r = gimple_eh_dispatch_region (eh_dispatch);
1946 r = remap_eh_region_nr (r, id);
1947 gimple_eh_dispatch_set_region (eh_dispatch, r);
1949 break;
1951 default:
1952 break;
1956 /* If STMT has a block defined, map it to the newly constructed block. */
1957 if (tree block = gimple_block (copy))
1959 tree *n;
1960 n = id->decl_map->get (block);
1961 gcc_assert (n);
1962 gimple_set_block (copy, *n);
1964 if (id->param_body_adjs)
1966 gimple_seq extra_stmts = NULL;
1967 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
1968 if (!gimple_seq_empty_p (extra_stmts))
1970 memset (&wi, 0, sizeof (wi));
1971 wi.info = id;
1972 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1973 !gsi_end_p (egsi);
1974 gsi_next (&egsi))
1975 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1976 gimple_seq_add_seq_without_update (&stmts, extra_stmts);
1980 if (id->reset_location)
1981 gimple_set_location (copy, input_location);
1983 /* Debug statements ought to be rebuilt and not copied. */
1984 gcc_checking_assert (!is_gimple_debug (copy));
1986 /* Remap all the operands in COPY. */
1987 memset (&wi, 0, sizeof (wi));
1988 wi.info = id;
1989 if (skip_first)
1990 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1991 else
1992 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1994 /* Clear the copied virtual operands. We are not remapping them here
1995 but are going to recreate them from scratch. */
1996 if (gimple_has_mem_ops (copy))
1998 gimple_set_vdef (copy, NULL_TREE);
1999 gimple_set_vuse (copy, NULL_TREE);
2002 if (cfun->can_throw_non_call_exceptions)
2004 /* When inlining a function which does not have non-call exceptions
2005 enabled into a function that has (which only happens with
2006 always-inline) we have to fix up stmts that cannot throw. */
2007 if (gcond *cond = dyn_cast <gcond *> (copy))
2008 if (gimple_could_trap_p (cond))
2010 gassign *cmp
2011 = gimple_build_assign (make_ssa_name (boolean_type_node),
2012 gimple_cond_code (cond),
2013 gimple_cond_lhs (cond),
2014 gimple_cond_rhs (cond));
2015 gimple_seq_add_stmt_without_update (&stmts, cmp);
2016 gimple_cond_set_code (cond, NE_EXPR);
2017 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
2018 gimple_cond_set_rhs (cond, boolean_false_node);
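/* Added illustration (GIMPLE sketch, not from the original sources): a
   possibly trapping condition copied into a caller with non-call exceptions
   enabled, e.g.

     if (*p_1 > 0) goto <then>; else goto <else>;

   is rewritten by the code above into

     _2 = *p_1 > 0;
     if (_2 != 0) goto <then>; else goto <else>;

   so the memory access that may throw becomes a separate statement that can
   get its own EH edges.  */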
2022 gimple_seq_add_stmt_without_update (&stmts, copy);
2023 return stmts;
2027 /* Copy basic block, scale profile accordingly. Edges will be taken care of
2028 later. */
2030 static basic_block
2031 copy_bb (copy_body_data *id, basic_block bb,
2032 profile_count num, profile_count den)
2034 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2035 basic_block copy_basic_block;
2036 tree decl;
2037 basic_block prev;
2039 profile_count::adjust_for_ipa_scaling (&num, &den);
2041 /* Search for previous copied basic block. */
2042 prev = bb->prev_bb;
2043 while (!prev->aux)
2044 prev = prev->prev_bb;
2046 /* create_basic_block() will append every new block to
2047 basic_block_info automatically. */
2048 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2049 copy_basic_block->count = bb->count.apply_scale (num, den);
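/* Added illustration (numbers are hypothetical, not from the original
   sources): NUM/DEN rescale the callee profile into this copy. If the
   callee's entry count (DEN) is 1000, the entry count of this inlined copy
   (NUM) is 250 and BB ran 400 times in the callee profile, the copied block
   gets 400 * 250 / 1000 = 100.  */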
2051 copy_gsi = gsi_start_bb (copy_basic_block);
2053 unsigned min_cond_uid = 0;
2054 if (id->src_cfun->cond_uids)
2056 if (!cfun->cond_uids)
2057 cfun->cond_uids = new hash_map <gcond*, unsigned> ();
2059 for (auto itr : *id->src_cfun->cond_uids)
2060 if (itr.second >= min_cond_uid)
2061 min_cond_uid = itr.second + 1;
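/* Added illustration (sketch, not from the original sources): MIN_COND_UID
   ends up one larger than the largest UID seen in the map iterated above,
   e.g. 8 when the largest recorded UID is 7; a condition whose original tag
   was 2 is then re-registered for the caller as 2 + 8 = 10 in the
   -fcondition-coverage handling further down, keeping the copied tags
   distinct from the ones already recorded.  */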
2064 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2066 gimple_seq stmts;
2067 gimple *stmt = gsi_stmt (gsi);
2068 gimple *orig_stmt = stmt;
2069 gimple_stmt_iterator stmts_gsi;
2070 bool stmt_added = false;
2072 id->regimplify = false;
2073 stmts = remap_gimple_stmt (stmt, id);
2075 if (gimple_seq_empty_p (stmts))
2076 continue;
2078 seq_gsi = copy_gsi;
2080 for (stmts_gsi = gsi_start (stmts);
2081 !gsi_end_p (stmts_gsi); )
2083 stmt = gsi_stmt (stmts_gsi);
2085 /* Advance iterator now before stmt is moved to seq_gsi. */
2086 gsi_next (&stmts_gsi);
2088 if (gimple_nop_p (stmt))
2089 continue;
2091 /* If -fcondition-coverage is used, register the inlined conditions
2092 in the cond->expression mapping of the caller. The expression tag
2093 is shifted so that conditions from the two bodies are not mixed. */
2094 if (id->src_cfun->cond_uids && is_a <gcond*> (stmt))
2096 gcond *orig_cond = as_a <gcond*> (orig_stmt);
2097 gcond *cond = as_a <gcond*> (stmt);
2098 unsigned *v = id->src_cfun->cond_uids->get (orig_cond);
2099 if (v)
2100 cfun->cond_uids->put (cond, *v + min_cond_uid);
2103 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2104 orig_stmt);
2106 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2108 if (id->regimplify)
2109 gimple_regimplify_operands (stmt, &seq_gsi);
2111 stmt_added = true;
2114 if (!stmt_added)
2115 continue;
2117 /* If copy_basic_block has been empty at the start of this iteration,
2118 call gsi_start_bb again to get at the newly added statements. */
2119 if (gsi_end_p (copy_gsi))
2120 copy_gsi = gsi_start_bb (copy_basic_block);
2121 else
2122 gsi_next (&copy_gsi);
2124 /* Process the new statement. The call to gimple_regimplify_operands
2125 possibly turned the statement into multiple statements; we
2126 need to process all of them. */
2129 tree fn;
2130 gcall *call_stmt;
2132 stmt = gsi_stmt (copy_gsi);
2133 call_stmt = dyn_cast <gcall *> (stmt);
2134 if (call_stmt
2135 && gimple_call_va_arg_pack_p (call_stmt)
2136 && id->call_stmt
2137 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2139 /* __builtin_va_arg_pack () should be replaced by
2140 all arguments corresponding to ... in the caller. */
2141 tree p;
2142 gcall *new_call;
2143 vec<tree> argarray;
2144 size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2145 size_t nargs = nargs_caller;
2147 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2149 /* Avoid crashing on invalid IL that doesn't have a
2150 varargs function or that doesn't pass enough arguments. */
2151 if (nargs == 0)
2152 break;
2153 nargs--;
2156 /* Create the new array of arguments. */
2157 size_t nargs_callee = gimple_call_num_args (call_stmt);
2158 size_t n = nargs + nargs_callee;
2159 argarray.create (n);
2160 argarray.safe_grow_cleared (n, true);
2162 /* Copy all the arguments before '...' */
2163 if (nargs_callee)
2164 memcpy (argarray.address (),
2165 gimple_call_arg_ptr (call_stmt, 0),
2166 nargs_callee * sizeof (tree));
2168 /* Append the arguments passed in '...' */
2169 if (nargs)
2170 memcpy (argarray.address () + nargs_callee,
2171 gimple_call_arg_ptr (id->call_stmt, 0)
2172 + (nargs_caller - nargs), nargs * sizeof (tree));
2174 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2175 argarray);
2177 argarray.release ();
2179 /* Copy all GIMPLE_CALL flags, location and block, except
2180 GF_CALL_VA_ARG_PACK. */
2181 gimple_call_copy_flags (new_call, call_stmt);
2182 gimple_call_set_va_arg_pack (new_call, false);
2183 gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2184 /* location includes block. */
2185 gimple_set_location (new_call, gimple_location (stmt));
2186 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2188 gsi_replace (&copy_gsi, new_call, false);
2189 stmt = new_call;
2191 else if (call_stmt
2192 && id->call_stmt
2193 && (decl = gimple_call_fndecl (stmt))
2194 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2196 /* __builtin_va_arg_pack_len () should be replaced by
2197 the number of anonymous arguments. */
2198 size_t nargs = gimple_call_num_args (id->call_stmt);
2199 tree count, p;
2200 gimple *new_stmt;
2202 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2203 nargs--;
2205 if (!gimple_call_lhs (stmt))
2207 /* Drop unused calls. */
2208 gsi_remove (&copy_gsi, false);
2209 continue;
2211 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2213 count = build_int_cst (integer_type_node, nargs);
2214 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2215 gsi_replace (&copy_gsi, new_stmt, false);
2216 stmt = new_stmt;
2218 else if (nargs != 0)
2220 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2221 count = build_int_cst (integer_type_node, nargs);
2222 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2223 PLUS_EXPR, newlhs, count);
2224 gimple_call_set_lhs (stmt, newlhs);
2225 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2228 else if (call_stmt
2229 && id->call_stmt
2230 && gimple_call_internal_p (stmt))
2231 switch (gimple_call_internal_fn (stmt))
2233 case IFN_TSAN_FUNC_EXIT:
2234 /* Drop .TSAN_FUNC_EXIT () internal calls during inlining. */
2235 gsi_remove (&copy_gsi, false);
2236 continue;
2237 case IFN_ASAN_MARK:
2238 /* Drop .ASAN_MARK internal calls during inlining into
2239 no_sanitize functions. */
2240 if (!sanitize_flags_p (SANITIZE_ADDRESS, id->dst_fn)
2241 && !sanitize_flags_p (SANITIZE_HWADDRESS, id->dst_fn))
2243 gsi_remove (&copy_gsi, false);
2244 continue;
2246 break;
2247 default:
2248 break;
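/* Added illustration (commentary not in the original sources; my_printf is a
   hypothetical wrapper): the two builtins handled above are meant for
   always_inline forwarding wrappers such as

     extern int printf (const char *, ...);

     static inline __attribute__ ((always_inline)) int
     my_printf (const char *fmt, ...)
     {
       if (__builtin_va_arg_pack_len () == 0)
         return printf ("%s", fmt);
       return printf (fmt, __builtin_va_arg_pack ());
     }

   When my_printf (fmt, 1, 2) is inlined, __builtin_va_arg_pack_len () is
   replaced by the constant 2 and the second printf call is rebuilt as
   printf (fmt, 1, 2), exactly as the code above does.  */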
2251 /* Statements produced by inlining can be unfolded, especially
2252 when we constant propagated some operands. We can't fold
2253 them right now for two reasons:
2254 1) folding requires SSA_NAME_DEF_STMTs to be correct
2255 2) we can't change function calls to builtins.
2256 So we just mark the statement for later folding. We mark
2257 all new statements, instead of just statements that have changed
2258 by some nontrivial substitution, so even statements made
2259 foldable indirectly are updated. If this turns out to be
2260 expensive, copy_body can be told to watch for nontrivial
2261 changes. */
2262 if (id->statements_to_fold)
2263 id->statements_to_fold->add (stmt);
2265 /* We're duplicating a CALL_EXPR. Find any corresponding
2266 callgraph edges and update or duplicate them. */
2267 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2269 struct cgraph_edge *edge;
2271 switch (id->transform_call_graph_edges)
2273 case CB_CGE_DUPLICATE:
2274 edge = id->src_node->get_edge (orig_stmt);
2275 if (edge)
2277 struct cgraph_edge *old_edge = edge;
2279 /* A speculative call consists of multiple
2280 edges - an indirect edge and one or more direct edges.
2281 Duplicate the whole thing and distribute frequencies
2282 accordingly. */
2283 if (edge->speculative)
2285 int n = 0;
2286 profile_count direct_cnt
2287 = profile_count::zero ();
2289 /* First figure out the distribution of counts
2290 so we can re-scale BB profile accordingly. */
2291 for (cgraph_edge *e = old_edge; e;
2292 e = e->next_speculative_call_target ())
2293 direct_cnt = direct_cnt + e->count;
2295 cgraph_edge *indirect
2296 = old_edge->speculative_call_indirect_edge ();
2297 profile_count indir_cnt = indirect->count;
2299 /* Next iterate over all direct edges, clone each one and its
2300 corresponding reference, and update the profile. */
2301 for (cgraph_edge *e = old_edge;
2302 e;
2303 e = e->next_speculative_call_target ())
2305 profile_count cnt = e->count;
2307 id->dst_node->clone_reference
2308 (e->speculative_call_target_ref (), stmt);
2309 edge = e->clone (id->dst_node, call_stmt,
2310 gimple_uid (stmt), num, den,
2311 true);
2312 profile_probability prob
2313 = cnt.probability_in (direct_cnt
2314 + indir_cnt);
2315 edge->count
2316 = copy_basic_block->count.apply_probability
2317 (prob);
2318 n++;
2320 gcc_checking_assert
2321 (indirect->num_speculative_call_targets_p ()
2322 == n);
2324 /* Duplicate the indirect edge after all direct edges
2325 have been cloned. */
2326 indirect = indirect->clone (id->dst_node, call_stmt,
2327 gimple_uid (stmt),
2328 num, den,
2329 true);
2331 profile_probability prob
2332 = indir_cnt.probability_in (direct_cnt
2333 + indir_cnt);
2334 indirect->count
2335 = copy_basic_block->count.apply_probability (prob);
2337 else
2339 edge = edge->clone (id->dst_node, call_stmt,
2340 gimple_uid (stmt),
2341 num, den,
2342 true);
2343 edge->count = copy_basic_block->count;
2346 break;
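/* Added illustration (numbers are hypothetical, not from the original
   sources): for a speculative call with one direct target whose edge count
   is 80 and an indirect remainder of 20, the probabilities computed above
   are 80/100 and 20/100; if the copied block's count is 50, the cloned
   direct edge therefore gets 50 * 0.8 = 40 and the cloned indirect edge
   50 * 0.2 = 10.  */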
2348 case CB_CGE_MOVE_CLONES:
2349 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2350 call_stmt);
2351 edge = id->dst_node->get_edge (stmt);
2352 break;
2354 case CB_CGE_MOVE:
2355 edge = id->dst_node->get_edge (orig_stmt);
2356 if (edge)
2357 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2358 break;
2360 default:
2361 gcc_unreachable ();
2364 /* Constant propagation on arguments done during inlining
2365 may create a new direct call. Produce an edge for it. */
2366 if ((!edge
2367 || (edge->indirect_inlining_edge
2368 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2369 && id->dst_node->definition
2370 && (fn = gimple_call_fndecl (stmt)) != NULL)
2372 struct cgraph_node *dest = cgraph_node::get_create (fn);
2374 /* We have a missing edge in the callgraph. This can happen
2375 when previous inlining turned an indirect call into a
2376 direct call by constant propagating arguments or we are
2377 producing a dead clone (for further cloning). In all
2378 other cases we hit a bug (incorrect node sharing is the
2379 most common reason for missing edges). */
2380 gcc_assert (!dest->definition
2381 || dest->address_taken
2382 || !id->src_node->definition
2383 || !id->dst_node->definition);
2384 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2385 id->dst_node->create_edge_including_clones
2386 (dest, orig_stmt, call_stmt, bb->count,
2387 CIF_ORIGINALLY_INDIRECT_CALL);
2388 else
2389 id->dst_node->create_edge (dest, call_stmt,
2390 bb->count)->inline_failed
2391 = CIF_ORIGINALLY_INDIRECT_CALL;
2392 if (dump_file)
2394 fprintf (dump_file, "Created new direct edge to %s\n",
2395 dest->dump_name ());
2399 notice_special_calls (as_a <gcall *> (stmt));
2402 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2403 id->eh_map, id->eh_lp_nr);
2405 gsi_next (&copy_gsi);
2407 while (!gsi_end_p (copy_gsi));
2409 copy_gsi = gsi_last_bb (copy_basic_block);
2412 return copy_basic_block;
2415 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2416 form is quite easy, since the dominator relationship for the old basic blocks
2417 does not change.
2419 There is however an exception where inlining might change the dominator
2420 relation across EH edges from basic blocks within the inlined function
2421 to landing pads in the function we inline into.
2423 The function fills in PHI_RESULTs of such PHI nodes if they refer
2424 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2425 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2426 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2427 set, and this means that there will be no overlapping live ranges
2428 for the underlying symbol.
2430 This might change in the future if we allow redirecting of EH edges and
2431 we might then want to change the way we build the CFG pre-inlining to
2432 include all the possible edges. */
2433 static void
2434 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2435 bool can_throw, bool nonlocal_goto)
2437 edge e;
2438 edge_iterator ei;
2440 FOR_EACH_EDGE (e, ei, bb->succs)
2441 if (!e->dest->aux
2442 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2444 gphi *phi;
2445 gphi_iterator si;
2447 if (!nonlocal_goto)
2448 gcc_assert (e->flags & EDGE_EH);
2450 if (!can_throw)
2451 gcc_assert (!(e->flags & EDGE_EH));
2453 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2455 edge re;
2457 phi = si.phi ();
2459 /* For abnormal goto/call edges the receiver can be the
2460 ENTRY_BLOCK. Do not assert this cannot happen. */
2462 gcc_assert ((e->flags & EDGE_EH)
2463 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2465 re = find_edge (ret_bb, e->dest);
2466 gcc_checking_assert (re);
2467 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2468 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2470 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2471 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2476 /* Insert clobbers for automatic variables of inlined ID->src_fn
2477 function at the start of basic block ID->eh_landing_pad_dest. */
2479 static void
2480 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2482 tree var;
2483 basic_block bb = id->eh_landing_pad_dest;
2484 live_vars_map *vars = NULL;
2485 unsigned int cnt = 0;
2486 unsigned int i;
2487 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2488 if (VAR_P (var)
2489 && !DECL_HARD_REGISTER (var)
2490 && !TREE_THIS_VOLATILE (var)
2491 && !DECL_HAS_VALUE_EXPR_P (var)
2492 && !is_gimple_reg (var)
2493 && auto_var_in_fn_p (var, id->src_fn)
2494 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2496 tree *t = id->decl_map->get (var);
2497 if (!t)
2498 continue;
2499 tree new_var = *t;
2500 if (VAR_P (new_var)
2501 && !DECL_HARD_REGISTER (new_var)
2502 && !TREE_THIS_VOLATILE (new_var)
2503 && !DECL_HAS_VALUE_EXPR_P (new_var)
2504 && !is_gimple_reg (new_var)
2505 && auto_var_in_fn_p (new_var, id->dst_fn))
2507 if (vars == NULL)
2508 vars = new live_vars_map;
2509 vars->put (DECL_UID (var), cnt++);
2512 if (vars == NULL)
2513 return;
2515 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2516 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2517 if (VAR_P (var))
2519 edge e;
2520 edge_iterator ei;
2521 bool needed = false;
2522 unsigned int *v = vars->get (DECL_UID (var));
2523 if (v == NULL)
2524 continue;
2525 FOR_EACH_EDGE (e, ei, bb->preds)
2526 if ((e->flags & EDGE_EH) != 0
2527 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2529 basic_block src_bb = (basic_block) e->src->aux;
2531 if (bitmap_bit_p (&live[src_bb->index], *v))
2533 needed = true;
2534 break;
2537 if (needed)
2539 tree new_var = *id->decl_map->get (var);
2540 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2541 tree clobber = build_clobber (TREE_TYPE (new_var));
2542 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2543 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2546 destroy_live_vars (live);
2547 delete vars;
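/* Added illustration (hypothetical example, not from the original sources):
   if the inlined callee owns an automatic aggregate that may be live when an
   exception escapes, e.g.

     struct S { char buf[64]; };
     void use (struct S *);
     static inline void callee (void) { struct S s; use (&s); }

   the caller's copy of S is marked dead on the EH landing pad by the
   "new_var = {CLOBBER};" statements inserted above, so its stack slot can be
   reused on the exception path.  */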
2550 /* Copy edges from BB into its copy constructed earlier, scale profile
2551 accordingly. Edges will be taken care of later. Assume aux
2552 pointers point to the copies of each BB. Return true if any
2553 debug stmts are left after a statement that must end the basic block. */
2555 static bool
2556 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2557 basic_block ret_bb, basic_block abnormal_goto_dest,
2558 copy_body_data *id)
2560 basic_block new_bb = (basic_block) bb->aux;
2561 edge_iterator ei;
2562 edge old_edge;
2563 gimple_stmt_iterator si;
2564 bool need_debug_cleanup = false;
2566 /* Use the indices from the original blocks to create edges for the
2567 new ones. */
2568 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2569 if (!(old_edge->flags & EDGE_EH))
2571 edge new_edge;
2572 int flags = old_edge->flags;
2573 location_t locus = old_edge->goto_locus;
2575 /* Return edges do get a FALLTHRU flag when they get inlined. */
2576 if (old_edge->dest->index == EXIT_BLOCK
2577 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2578 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2579 flags |= EDGE_FALLTHRU;
2581 new_edge
2582 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2583 new_edge->probability = old_edge->probability;
2584 if (!id->reset_location)
2585 new_edge->goto_locus = remap_location (locus, id);
2588 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2589 return false;
2591 /* When doing function splitting, we must decrease the count of the return
2592 block which was previously reachable from blocks we did not copy. */
2593 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2594 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2595 if (old_edge->src->index != ENTRY_BLOCK
2596 && !old_edge->src->aux)
2597 new_bb->count -= old_edge->count ().apply_scale (num, den);
2599 /* Walk stmts from end to start so that splitting will adjust the BB
2600 pointer for each stmt at most once, even when we split the block
2601 multiple times. */
2602 bool seen_nondebug = false;
2603 for (si = gsi_last_bb (new_bb); !gsi_end_p (si);)
2605 bool can_throw, nonlocal_goto;
2606 gimple *copy_stmt = gsi_stmt (si);
2608 /* Do this before the possible split_block. */
2609 gsi_prev (&si);
2611 /* If this tree could throw an exception, there are two
2612 cases where we need to add abnormal edge(s): the
2613 tree wasn't in a region and there is a "current
2614 region" in the caller; or the original tree had
2615 EH edges. In both cases split the block after the tree,
2616 and add abnormal edge(s) as needed; we need both
2617 those from the callee and the caller.
2618 We check whether the copy can throw, because the const
2619 propagation can change an INDIRECT_REF which throws
2620 into a COMPONENT_REF which doesn't. If the copy
2621 can throw, the original could also throw. */
2622 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2623 nonlocal_goto
2624 = (stmt_can_make_abnormal_goto (copy_stmt)
2625 && !computed_goto_p (copy_stmt));
2627 if (can_throw || nonlocal_goto)
2629 /* If there are only debug insns after copy_stmt, don't split
2630 the block but instead mark the block for cleanup. */
2631 if (!seen_nondebug)
2632 need_debug_cleanup = true;
2633 else
2635 /* Note that bb's predecessor edges aren't necessarily
2636 right at this point; split_block doesn't care. */
2637 edge e = split_block (new_bb, copy_stmt);
2638 e->dest->aux = new_bb->aux;
2639 seen_nondebug = false;
2643 if (!is_gimple_debug (copy_stmt))
2644 seen_nondebug = true;
2646 bool update_probs = false;
2648 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2650 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2651 update_probs = true;
2653 else if (can_throw)
2655 make_eh_edge (copy_stmt);
2656 update_probs = true;
2659 /* EH edges may not match old edges. Copy as much as possible. */
2660 if (update_probs)
2662 edge e;
2663 edge_iterator ei;
2664 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2666 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2667 if ((old_edge->flags & EDGE_EH)
2668 && (e = find_edge (copy_stmt_bb,
2669 (basic_block) old_edge->dest->aux))
2670 && (e->flags & EDGE_EH))
2671 e->probability = old_edge->probability;
2673 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2674 if (e->flags & EDGE_EH)
2676 if (!e->probability.initialized_p ())
2677 e->probability = profile_probability::never ();
2678 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2680 if (id->eh_landing_pad_dest == NULL)
2681 id->eh_landing_pad_dest = e->dest;
2682 else
2683 gcc_assert (id->eh_landing_pad_dest == e->dest);
2689 /* If the call we inline cannot make an abnormal goto, do not add
2690 additional abnormal edges but only retain those already present
2691 in the original function body. */
2692 if (abnormal_goto_dest == NULL)
2693 nonlocal_goto = false;
2694 if (nonlocal_goto)
2696 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2698 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2699 nonlocal_goto = false;
2700 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2701 in OpenMP regions which aren't allowed to be left abnormally.
2702 So, no need to add an abnormal edge in that case. */
2703 else if (is_gimple_call (copy_stmt)
2704 && gimple_call_internal_p (copy_stmt)
2705 && (gimple_call_internal_fn (copy_stmt)
2706 == IFN_ABNORMAL_DISPATCHER)
2707 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2708 nonlocal_goto = false;
2709 else
2710 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2711 EDGE_ABNORMAL);
2714 if ((can_throw || nonlocal_goto)
2715 && gimple_in_ssa_p (cfun))
2716 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2717 can_throw, nonlocal_goto);
2719 return need_debug_cleanup;
2722 /* Copy the PHIs. All blocks and edges are copied, some blocks
2723 were possibly split and new outgoing EH edges inserted.
2724 BB points to the block of the original function and AUX pointers link
2725 the original and newly copied blocks. */
2727 static void
2728 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2730 basic_block const new_bb = (basic_block) bb->aux;
2731 edge_iterator ei;
2732 gphi *phi;
2733 gphi_iterator si;
2734 edge new_edge;
2735 bool inserted = false;
2737 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2739 tree res, new_res;
2740 gphi *new_phi;
2742 phi = si.phi ();
2743 res = PHI_RESULT (phi);
2744 new_res = res;
2745 if (!virtual_operand_p (res)
2746 && (!id->param_body_adjs
2747 || !id->param_body_adjs->m_dead_stmts.contains (phi)))
2749 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2750 if (EDGE_COUNT (new_bb->preds) == 0)
2752 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2753 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2755 else
2757 new_phi = create_phi_node (new_res, new_bb);
2758 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2760 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2761 bb);
2762 tree arg;
2763 tree new_arg;
2764 edge_iterator ei2;
2765 location_t locus;
2767 /* When doing partial cloning, we allow PHIs on the entry
2768 block as long as all the arguments are the same.
2769 Use any input edge to find the argument to copy. */
2770 if (!old_edge)
2771 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2772 if (!old_edge->src->aux)
2773 break;
2775 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2776 new_arg = arg;
2777 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2778 gcc_assert (new_arg);
2779 /* With return slot optimization we can end up with
2780 non-gimple (foo *)&this->m, fix that here. */
2781 if (TREE_CODE (new_arg) != SSA_NAME
2782 && TREE_CODE (new_arg) != FUNCTION_DECL
2783 && !is_gimple_val (new_arg))
2785 gimple_seq stmts = NULL;
2786 new_arg = force_gimple_operand (new_arg, &stmts, true,
2787 NULL);
2788 gsi_insert_seq_on_edge (new_edge, stmts);
2789 inserted = true;
2791 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2792 if (id->reset_location)
2793 locus = input_location;
2794 else
2795 locus = remap_location (locus, id);
2796 add_phi_arg (new_phi, new_arg, new_edge, locus);
2802 /* Commit the delayed edge insertions. */
2803 if (inserted)
2804 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2805 gsi_commit_one_edge_insert (new_edge, NULL);
2809 /* Wrapper for remap_decl so it can be used as a callback. */
2811 static tree
2812 remap_decl_1 (tree decl, void *data)
2814 return remap_decl (decl, (copy_body_data *) data);
2817 /* Build struct function and associated data structures for the new clone
2818 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2819 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2821 static void
2822 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2824 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2826 /* Register specific tree functions. */
2827 gimple_register_cfg_hooks ();
2829 /* Get a clean struct function. */
2830 push_struct_function (new_fndecl, true);
2831 targetm.target_option.relayout_function (new_fndecl);
2833 /* We will rebuild these, so just sanity check that they are empty. */
2834 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2835 gcc_assert (cfun->local_decls == NULL);
2836 gcc_assert (cfun->cfg == NULL);
2837 gcc_assert (cfun->decl == new_fndecl);
2839 /* Copy items we preserve during cloning. */
2840 cfun->static_chain_decl = src_cfun->static_chain_decl;
2841 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2842 cfun->function_end_locus = src_cfun->function_end_locus;
2843 cfun->curr_properties = src_cfun->curr_properties;
2844 cfun->last_verified = src_cfun->last_verified;
2845 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2846 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2847 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2848 cfun->calls_eh_return = src_cfun->calls_eh_return;
2849 cfun->stdarg = src_cfun->stdarg;
2850 cfun->after_inlining = src_cfun->after_inlining;
2851 cfun->can_throw_non_call_exceptions
2852 = src_cfun->can_throw_non_call_exceptions;
2853 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2854 cfun->returns_struct = src_cfun->returns_struct;
2855 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2857 init_empty_tree_cfg ();
2859 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2860 cfun->cfg->full_profile = src_cfun->cfg->full_profile;
2862 profile_count num = count;
2863 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2864 profile_count::adjust_for_ipa_scaling (&num, &den);
2866 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2867 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2868 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2869 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2870 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2871 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2872 if (src_cfun->eh)
2873 init_eh_for_function ();
2875 if (src_cfun->gimple_df)
2877 init_tree_ssa (cfun);
2878 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2879 if (cfun->gimple_df->in_ssa_p)
2880 init_ssa_operands (cfun);
2884 /* Helper function for copy_cfg_body. Move debug stmts from the end
2885 of NEW_BB to the beginning of successor basic blocks when needed. If the
2886 successor has multiple predecessors, reset them, otherwise keep
2887 their value. */
2889 static void
2890 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2892 edge e;
2893 edge_iterator ei;
2894 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2896 if (gsi_end_p (si)
2897 || gsi_one_before_end_p (si)
2898 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2899 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2900 return;
2902 FOR_EACH_EDGE (e, ei, new_bb->succs)
2904 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2905 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2906 while (is_gimple_debug (gsi_stmt (ssi)))
2908 gimple *stmt = gsi_stmt (ssi);
2909 gdebug *new_stmt;
2910 tree var;
2911 tree value;
2913 /* For the last edge move the debug stmts instead of copying
2914 them. */
2915 if (ei_one_before_end_p (ei))
2917 si = ssi;
2918 gsi_prev (&ssi);
2919 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2921 gimple_debug_bind_reset_value (stmt);
2922 gimple_set_location (stmt, UNKNOWN_LOCATION);
2924 gsi_remove (&si, false);
2925 gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
2926 continue;
2929 if (gimple_debug_bind_p (stmt))
2931 var = gimple_debug_bind_get_var (stmt);
2932 if (single_pred_p (e->dest))
2934 value = gimple_debug_bind_get_value (stmt);
2935 value = unshare_expr (value);
2936 new_stmt = gimple_build_debug_bind (var, value, stmt);
2938 else
2939 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2941 else if (gimple_debug_source_bind_p (stmt))
2943 var = gimple_debug_source_bind_get_var (stmt);
2944 value = gimple_debug_source_bind_get_value (stmt);
2945 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2947 else if (gimple_debug_nonbind_marker_p (stmt))
2948 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2949 else
2950 gcc_unreachable ();
2951 gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
2952 id->debug_stmts.safe_push (new_stmt);
2953 gsi_prev (&ssi);
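/* Added illustration (sketch, not from the original sources): if a copied
   block ends with a throwing call that is followed only by debug binds, e.g.

     foo ();
     # DEBUG x => 42

   the EH edge must leave right after the call, so the loop above moves the
   debug bind to the start of the last successor and copies it to the others,
   resetting the bound value whenever the destination has multiple
   predecessors, since it would be valid on only one incoming path.  */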
2958 /* Make a copy of the sub-loops of SRC_PARENT and place them
2959 as children of DEST_PARENT. */
2961 static void
2962 copy_loops (copy_body_data *id,
2963 class loop *dest_parent, class loop *src_parent)
2965 class loop *src_loop = src_parent->inner;
2966 while (src_loop)
2968 if (!id->blocks_to_copy
2969 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2971 class loop *dest_loop = alloc_loop ();
2973 /* Assign the new loop its header and latch and associate
2974 those with the new loop. */
2975 dest_loop->header = (basic_block)src_loop->header->aux;
2976 dest_loop->header->loop_father = dest_loop;
2977 if (src_loop->latch != NULL)
2979 dest_loop->latch = (basic_block)src_loop->latch->aux;
2980 dest_loop->latch->loop_father = dest_loop;
2983 /* Copy loop meta-data. */
2984 copy_loop_info (src_loop, dest_loop);
2985 if (dest_loop->unroll)
2986 cfun->has_unroll = true;
2987 if (dest_loop->force_vectorize)
2988 cfun->has_force_vectorize_loops = true;
2989 if (id->src_cfun->last_clique != 0)
2990 dest_loop->owned_clique
2991 = remap_dependence_clique (id,
2992 src_loop->owned_clique
2993 ? src_loop->owned_clique : 1);
2995 /* Finally place it into the loop array and the loop tree. */
2996 place_new_loop (cfun, dest_loop);
2997 flow_loop_tree_node_add (dest_parent, dest_loop);
2999 if (src_loop->simduid)
3001 dest_loop->simduid = remap_decl (src_loop->simduid, id);
3002 cfun->has_simduid_loops = true;
3005 /* Recurse. */
3006 copy_loops (id, dest_loop, src_loop);
3008 src_loop = src_loop->next;
3012 /* Call redirect_call_stmt_to_callee on all calls in BB. */
3014 void
3015 redirect_all_calls (copy_body_data * id, basic_block bb)
3017 gimple_stmt_iterator si;
3018 gimple *last = last_nondebug_stmt (bb);
3019 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
3021 gimple *stmt = gsi_stmt (si);
3022 if (is_gimple_call (stmt))
3024 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
3025 if (edge)
3027 if (!id->killed_new_ssa_names)
3028 id->killed_new_ssa_names = new hash_set<tree> (16);
3029 cgraph_edge::redirect_call_stmt_to_callee (edge,
3030 id->killed_new_ssa_names);
3032 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
3033 gimple_purge_dead_eh_edges (bb);
3039 /* Make a copy of the body of FN so that it can be inserted inline in
3040 another function. Walks FN via CFG, returns new fndecl. */
3042 static tree
3043 copy_cfg_body (copy_body_data * id,
3044 basic_block entry_block_map, basic_block exit_block_map,
3045 basic_block new_entry)
3047 tree callee_fndecl = id->src_fn;
3048 /* Original cfun for the callee, doesn't change. */
3049 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3050 struct function *cfun_to_copy;
3051 basic_block bb;
3052 tree new_fndecl = NULL;
3053 bool need_debug_cleanup = false;
3054 int last;
3055 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3056 profile_count num = entry_block_map->count;
3058 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3060 /* Register specific tree functions. */
3061 gimple_register_cfg_hooks ();
3063 /* If we are inlining just a region of the function, make sure to connect
3064 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3065 part of a loop, we must compute the frequency and probability of
3066 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3067 probabilities of edges incoming from the nonduplicated region. */
3068 if (new_entry)
3070 edge e;
3071 edge_iterator ei;
3072 den = profile_count::zero ();
3074 FOR_EACH_EDGE (e, ei, new_entry->preds)
3075 if (!e->src->aux)
3076 den += e->count ();
3077 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3080 profile_count::adjust_for_ipa_scaling (&num, &den);
3082 /* Must have a CFG here at this point. */
3083 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3084 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3087 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3088 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3089 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3090 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3092 /* Duplicate any exception-handling regions. */
3093 if (cfun->eh)
3094 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3095 remap_decl_1, id);
3097 /* Use aux pointers to map the original blocks to their copies. */
3098 FOR_EACH_BB_FN (bb, cfun_to_copy)
3099 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3101 basic_block new_bb = copy_bb (id, bb, num, den);
3102 bb->aux = new_bb;
3103 new_bb->aux = bb;
3104 new_bb->loop_father = entry_block_map->loop_father;
3107 last = last_basic_block_for_fn (cfun);
3109 /* Now that we've duplicated the blocks, duplicate their edges. */
3110 basic_block abnormal_goto_dest = NULL;
3111 if (id->call_stmt
3112 && stmt_can_make_abnormal_goto (id->call_stmt))
3114 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3116 bb = gimple_bb (id->call_stmt);
3117 gsi_next (&gsi);
3118 if (gsi_end_p (gsi))
3119 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3121 FOR_ALL_BB_FN (bb, cfun_to_copy)
3122 if (!id->blocks_to_copy
3123 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3124 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3125 abnormal_goto_dest, id);
3127 if (id->eh_landing_pad_dest)
3129 add_clobbers_to_eh_landing_pad (id);
3130 id->eh_landing_pad_dest = NULL;
3133 if (new_entry)
3135 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3136 EDGE_FALLTHRU);
3137 e->probability = profile_probability::always ();
3140 /* Duplicate the loop tree, if available and wanted. */
3141 if (loops_for_fn (src_cfun) != NULL
3142 && current_loops != NULL)
3144 copy_loops (id, entry_block_map->loop_father,
3145 get_loop (src_cfun, 0));
3146 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3147 loops_state_set (LOOPS_NEED_FIXUP);
3150 /* If the loop tree in the source function needed fixup, mark the
3151 destination loop tree for fixup, too. */
3152 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3153 loops_state_set (LOOPS_NEED_FIXUP);
3155 if (gimple_in_ssa_p (cfun))
3156 FOR_ALL_BB_FN (bb, cfun_to_copy)
3157 if (!id->blocks_to_copy
3158 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3159 copy_phis_for_bb (bb, id);
3161 FOR_ALL_BB_FN (bb, cfun_to_copy)
3162 if (bb->aux)
3164 if (need_debug_cleanup
3165 && bb->index != ENTRY_BLOCK
3166 && bb->index != EXIT_BLOCK)
3167 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3168 /* Update call edge destinations. This cannot be done before loop
3169 info is updated, because we may split basic blocks. */
3170 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3171 && bb->index != ENTRY_BLOCK
3172 && bb->index != EXIT_BLOCK)
3173 redirect_all_calls (id, (basic_block)bb->aux);
3174 ((basic_block)bb->aux)->aux = NULL;
3175 bb->aux = NULL;
3178 /* Zero out AUX fields of newly created blocks during EH edge
3179 insertion. */
3180 for (; last < last_basic_block_for_fn (cfun); last++)
3182 if (need_debug_cleanup)
3183 maybe_move_debug_stmts_to_successors (id,
3184 BASIC_BLOCK_FOR_FN (cfun, last));
3185 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3186 /* Update call edge destinations. This cannot be done before loop
3187 info is updated, because we may split basic blocks. */
3188 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3189 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3191 entry_block_map->aux = NULL;
3192 exit_block_map->aux = NULL;
3194 if (id->eh_map)
3196 delete id->eh_map;
3197 id->eh_map = NULL;
3199 if (id->dependence_map)
3201 delete id->dependence_map;
3202 id->dependence_map = NULL;
3205 return new_fndecl;
3208 /* Copy the debug STMT using ID. We deal with these statements in a
3209 special way: if any variable in their VALUE expression wasn't
3210 remapped yet, we won't remap it, because that would get decl uids
3211 out of sync, causing codegen differences between -g and -g0. If
3212 this arises, we drop the VALUE expression altogether. */
3214 static void
3215 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3217 tree t, *n;
3218 struct walk_stmt_info wi;
3220 if (tree block = gimple_block (stmt))
3222 n = id->decl_map->get (block);
3223 gimple_set_block (stmt, n ? *n : id->block);
3226 if (gimple_debug_nonbind_marker_p (stmt))
3228 if (id->call_stmt && !gimple_block (stmt))
3230 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3231 gsi_remove (&gsi, true);
3233 return;
3236 /* Remap all the operands in COPY. */
3237 memset (&wi, 0, sizeof (wi));
3238 wi.info = id;
3240 processing_debug_stmt = 1;
3242 if (gimple_debug_source_bind_p (stmt))
3243 t = gimple_debug_source_bind_get_var (stmt);
3244 else if (gimple_debug_bind_p (stmt))
3245 t = gimple_debug_bind_get_var (stmt);
3246 else
3247 gcc_unreachable ();
3249 if (TREE_CODE (t) == PARM_DECL
3250 && id->debug_map
3251 && (n = id->debug_map->get (t)))
3253 gcc_assert (VAR_P (*n));
3254 t = *n;
3256 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3257 /* T is a non-localized variable. */;
3258 else
3259 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3261 if (gimple_debug_bind_p (stmt))
3263 gimple_debug_bind_set_var (stmt, t);
3265 if (gimple_debug_bind_has_value_p (stmt))
3266 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3267 remap_gimple_op_r, &wi, NULL);
3269 /* Punt if any decl couldn't be remapped. */
3270 if (processing_debug_stmt < 0)
3271 gimple_debug_bind_reset_value (stmt);
3273 else if (gimple_debug_source_bind_p (stmt))
3275 gimple_debug_source_bind_set_var (stmt, t);
3276 /* When inlining, if the source bind refers to one of the optimized
3277 away parameters, change the source bind into a normal debug bind
3278 referring to the corresponding DEBUG_EXPR_DECL that should have
3279 been bound before the call stmt. */
3280 t = gimple_debug_source_bind_get_value (stmt);
3281 if (t != NULL_TREE
3282 && TREE_CODE (t) == PARM_DECL
3283 && id->call_stmt)
3285 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3286 unsigned int i;
3287 if (debug_args != NULL)
3289 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3290 if ((**debug_args)[i] == DECL_ORIGIN (t)
3291 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3293 t = (**debug_args)[i + 1];
3294 stmt->subcode = GIMPLE_DEBUG_BIND;
3295 gimple_debug_bind_set_value (stmt, t);
3296 break;
3300 if (gimple_debug_source_bind_p (stmt))
3301 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3302 remap_gimple_op_r, &wi, NULL);
3305 processing_debug_stmt = 0;
3307 update_stmt (stmt);
3310 /* Process deferred debug stmts. In order to give values better odds
3311 of being successfully remapped, we delay the processing of debug
3312 stmts until all other stmts that might require remapping are
3313 processed. */
3315 static void
3316 copy_debug_stmts (copy_body_data *id)
3318 if (!id->debug_stmts.exists ())
3319 return;
3321 for (gdebug *stmt : id->debug_stmts)
3322 copy_debug_stmt (stmt, id);
3324 id->debug_stmts.release ();
3327 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3328 another function. */
3330 static tree
3331 copy_tree_body (copy_body_data *id)
3333 tree fndecl = id->src_fn;
3334 tree body = DECL_SAVED_TREE (fndecl);
3336 walk_tree (&body, copy_tree_body_r, id, NULL);
3338 return body;
3341 /* Make a copy of the body of FN so that it can be inserted inline in
3342 another function. */
3344 static tree
3345 copy_body (copy_body_data *id,
3346 basic_block entry_block_map, basic_block exit_block_map,
3347 basic_block new_entry)
3349 tree fndecl = id->src_fn;
3350 tree body;
3352 /* If this body has a CFG, walk CFG and copy. */
3353 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3354 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3355 new_entry);
3356 copy_debug_stmts (id);
3357 if (id->killed_new_ssa_names)
3359 ipa_release_ssas_in_hash (id->killed_new_ssa_names);
3360 delete id->killed_new_ssa_names;
3361 id->killed_new_ssa_names = NULL;
3364 return body;
3367 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3368 defined in function FN, or of a data member thereof. */
3370 static bool
3371 self_inlining_addr_expr (tree value, tree fn)
3373 tree var;
3375 if (TREE_CODE (value) != ADDR_EXPR)
3376 return false;
3378 var = get_base_address (TREE_OPERAND (value, 0));
3380 return var && auto_var_in_fn_p (var, fn);
3383 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3384 lexical block and line number information from base_stmt, if given,
3385 or from the last stmt of the block otherwise. */
3387 static gimple *
3388 insert_init_debug_bind (copy_body_data *id,
3389 basic_block bb, tree var, tree value,
3390 gimple *base_stmt)
3392 gimple *note;
3393 gimple_stmt_iterator gsi;
3394 tree tracked_var;
3396 if (!gimple_in_ssa_p (id->src_cfun))
3397 return NULL;
3399 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3400 return NULL;
3402 tracked_var = target_for_debug_bind (var);
3403 if (!tracked_var)
3404 return NULL;
3406 if (bb)
3408 gsi = gsi_last_bb (bb);
3409 if (!base_stmt && !gsi_end_p (gsi))
3410 base_stmt = gsi_stmt (gsi);
3413 note = gimple_build_debug_bind (tracked_var,
3414 value == error_mark_node
3415 ? NULL_TREE : unshare_expr (value),
3416 base_stmt);
3418 if (bb)
3420 if (!gsi_end_p (gsi))
3421 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3422 else
3423 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3426 return note;
3429 static void
3430 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3432 /* If VAR represents a zero-sized variable, it's possible that the
3433 assignment statement may result in no gimple statements. */
3434 if (init_stmt)
3436 gimple_stmt_iterator si = gsi_last_bb (bb);
3438 /* We can end up with init statements that store to a non-register
3439 from a rhs with a conversion. Handle that here by forcing the
3440 rhs into a temporary. gimple_regimplify_operands is not
3441 prepared to do this for us. */
3442 if (!is_gimple_debug (init_stmt)
3443 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3444 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3445 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3447 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3448 TREE_TYPE (gimple_assign_lhs (init_stmt)),
3449 gimple_assign_rhs1 (init_stmt));
3450 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3451 GSI_NEW_STMT);
3452 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3453 gimple_assign_set_rhs1 (init_stmt, rhs);
3455 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3456 if (!is_gimple_debug (init_stmt))
3458 gimple_regimplify_operands (init_stmt, &si);
3460 tree def = gimple_assign_lhs (init_stmt);
3461 insert_init_debug_bind (id, bb, def, def, init_stmt);
3466 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3467 if need be (which should only be necessary for invalid programs). Attempt
3468 to convert VALUE to TYPE and return the result if it is possible; just return
3469 a zero constant of the given type if it fails. */
3471 tree
3472 force_value_to_type (tree type, tree value)
3474 /* If we can match up types by promotion/demotion do so. */
3475 if (fold_convertible_p (type, value))
3476 return fold_convert (type, value);
3478 /* ??? For valid programs we should not end up here.
3479 Still if we end up with truly mismatched types here, fall back
3480 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3481 GIMPLE to the following passes. */
3482 if (TREE_CODE (value) == WITH_SIZE_EXPR)
3483 return error_mark_node;
3484 else if (!is_gimple_reg_type (TREE_TYPE (value))
3485 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3486 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3487 else
3488 return build_zero_cst (type);
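/* Added illustration (hypothetical, not from the original sources; such code
   is invalid but must not crash the compiler): a call through a mismatched
   prototype, e.g.

     int callee (int x) { return x; }
     int caller (void) { return ((int (*) (long long)) callee) (1LL); }

   may hand the inliner a 'long long' value for an 'int' parameter;
   force_value_to_type then falls back to fold_convert, a VIEW_CONVERT_EXPR,
   or a zero of the parameter type, per the cases above.  */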
3491 /* Initialize parameter P with VALUE. If needed, produce an init statement
3492 at the end of BB. When BB is NULL, we return the init statement to be
3493 output later. */
3494 static gimple *
3495 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3496 basic_block bb, tree *vars)
3498 gimple *init_stmt = NULL;
3499 tree var;
3500 tree def = (gimple_in_ssa_p (cfun)
3501 ? ssa_default_def (id->src_cfun, p) : NULL);
3503 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3504 here since the type of this decl must be visible to the calling
3505 function. */
3506 var = copy_decl_to_var (p, id);
3508 /* Declare this new variable. */
3509 DECL_CHAIN (var) = *vars;
3510 *vars = var;
3512 /* Make gimplifier happy about this variable. */
3513 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3515 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3516 we would not need to create a new variable here at all, if it
3517 weren't for debug info. Still, we can just use the argument
3518 value. */
3519 if (TREE_READONLY (p)
3520 && !TREE_ADDRESSABLE (p)
3521 && value
3522 && !TREE_SIDE_EFFECTS (value)
3523 && !def)
3525 /* We may produce non-gimple trees by adding NOPs or introduce invalid
3526 sharing when the value is not a constant or a DECL. And we need to make
3527 sure that it cannot be modified from another path in the callee. */
3528 if (((is_gimple_min_invariant (value)
3529 /* When the parameter is used in a context that forces it to
3530 not be a GIMPLE register avoid substituting something that
3531 is not a decl there. */
3532 && ! DECL_NOT_GIMPLE_REG_P (p))
3533 || (DECL_P (value) && TREE_READONLY (value))
3534 || (auto_var_in_fn_p (value, id->dst_fn)
3535 && !TREE_ADDRESSABLE (value)))
3536 && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
3537 /* We have to be very careful about ADDR_EXPR. Make sure
3538 the base variable isn't a local variable of the inlined
3539 function, e.g., when doing recursive inlining, direct or
3540 mutually-recursive or whatever, which is why we don't
3541 just test whether fn == current_function_decl. */
3542 && ! self_inlining_addr_expr (value, fn))
3544 insert_decl_map (id, p, value);
3545 if (!id->debug_map)
3546 id->debug_map = new hash_map<tree, tree>;
3547 id->debug_map->put (p, var);
3548 return insert_init_debug_bind (id, bb, var, value, NULL);
3552 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3553 that way, when the PARM_DECL is encountered, it will be
3554 automatically replaced by the VAR_DECL. */
3555 insert_decl_map (id, p, var);
3557 /* Even if P was TREE_READONLY, the new VAR should not be. In the original
3558 code, we would have constructed a temporary, and then the function body
3559 would have never changed the value of P. However, now, we will be
3560 constructing VAR directly. Therefore, it must not be TREE_READONLY. */
3561 TREE_READONLY (var) = 0;
3563 tree rhs = value;
3564 if (value
3565 && value != error_mark_node
3566 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3567 rhs = force_value_to_type (TREE_TYPE (p), value);
3569 /* If there is no setup required and we are in SSA, take the easy route
3570 replacing all SSA names representing the function parameter by the
3571 SSA name passed to the function.
3573 We need to construct a map for the variable anyway as it might be used
3574 in different SSA names when the parameter is assigned in the function.
3576 Do the replacement at -O0 for const arguments replaced by a constant.
3577 This is important for builtin_constant_p and other constructs requiring
3578 a constant argument to be visible in the inlined function body. */
3579 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3580 && (optimize
3581 || (TREE_READONLY (p)
3582 && is_gimple_min_invariant (rhs)))
3583 && (TREE_CODE (rhs) == SSA_NAME
3584 || is_gimple_min_invariant (rhs))
3585 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3587 insert_decl_map (id, def, rhs);
3588 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3591 /* If the value of the argument is never used, don't bother initializing
3592 it. */
3593 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3595 /* When there's a gross type mismatch between the passed value
3596 and the declared argument type drop it on the floor and do
3597 not bother to insert a debug bind. */
3598 if (value && !is_gimple_reg_type (TREE_TYPE (value)))
3599 return NULL;
3600 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3603 /* Initialize this VAR_DECL from the equivalent argument. Convert
3604 the argument to the proper type in case it was promoted. */
3605 if (value)
3607 if (rhs == error_mark_node)
3609 insert_decl_map (id, p, var);
3610 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3613 STRIP_USELESS_TYPE_CONVERSION (rhs);
3615 /* If we are in SSA form properly remap the default definition. */
3616 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3618 if (def)
3620 def = remap_ssa_name (def, id);
3621 init_stmt = gimple_build_assign (def, rhs);
3622 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3623 set_ssa_default_def (cfun, var, NULL);
3626 else if (!is_empty_type (TREE_TYPE (var)))
3627 init_stmt = gimple_build_assign (var, rhs);
3629 if (bb && init_stmt)
3630 insert_init_stmt (id, bb, init_stmt);
3632 return init_stmt;
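/* Added illustration (hypothetical example, not from the original sources):
   the substitution paths above keep constant arguments visible inside the
   inlined body, e.g.

     static inline __attribute__ ((always_inline)) int
     is_const (const int x) { return __builtin_constant_p (x); }

     int f (void) { return is_const (42); }

   Here the readonly parameter X can be mapped directly to 42 instead of a
   separately initialized local, so __builtin_constant_p may still fold to 1
   even at -O0.  */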
3635 /* Generate code to initialize the parameters of the function at the
3636 top of the stack in ID from the GIMPLE_CALL STMT. */
3638 static void
3639 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3640 tree fn, basic_block bb)
3642 tree parms;
3643 size_t i;
3644 tree p;
3645 tree vars = NULL_TREE;
3646 tree static_chain = gimple_call_chain (stmt);
3648 /* Figure out what the parameters are. */
3649 parms = DECL_ARGUMENTS (fn);
3651 /* Loop through the parameter declarations, replacing each with an
3652 equivalent VAR_DECL, appropriately initialized. */
3653 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3655 tree val;
3656 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3657 setup_one_parameter (id, p, val, fn, bb, &vars);
3659 /* After remapping the parameters, remap their types. This has to be done
3660 in a second loop over all parameters to appropriately remap
3661 variable sized arrays when the size is specified in a
3662 parameter following the array. */
3663 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3665 tree *varp = id->decl_map->get (p);
3666 if (varp && VAR_P (*varp))
3668 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3669 ? ssa_default_def (id->src_cfun, p) : NULL);
3670 tree var = *varp;
3671 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3672 /* Also remap the default definition if it was remapped
3673 to the default definition of the parameter replacement
3674 by the parameter setup. */
3675 if (def)
3677 tree *defp = id->decl_map->get (def);
3678 if (defp
3679 && TREE_CODE (*defp) == SSA_NAME
3680 && SSA_NAME_VAR (*defp) == var)
3681 TREE_TYPE (*defp) = TREE_TYPE (var);
3683 /* When not optimizing and the parameter is unused, assign to
3684 a dummy SSA name. Do this after remapping the type above. */
3685 else if (!optimize
3686 && is_gimple_reg (p)
3687 && i < gimple_call_num_args (stmt))
3689 tree val = gimple_call_arg (stmt, i);
3690 if (val != error_mark_node)
3692 if (!useless_type_conversion_p (TREE_TYPE (p),
3693 TREE_TYPE (val)))
3694 val = force_value_to_type (TREE_TYPE (p), val);
3695 def = make_ssa_name (var);
3696 gimple *init_stmt = gimple_build_assign (def, val);
3697 insert_init_stmt (id, bb, init_stmt);
3703 /* Initialize the static chain. */
3704 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3705 gcc_assert (fn != current_function_decl);
3706 if (p)
3708 /* No static chain? Seems like a bug in tree-nested.cc. */
3709 gcc_assert (static_chain);
3711 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3714 /* Reverse so the variables appear in the correct order in DWARF
3715 debug info. */
3716 vars = nreverse (vars);
3718 declare_inline_vars (id->block, vars);
3722 /* Declare a return variable to replace the RESULT_DECL for the
3723 function we are calling. An appropriate DECL_STMT is returned.
3724 The USE_STMT is filled to contain a use of the declaration to
3725 indicate the return value of the function.
3727 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
3728 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3729 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3731 The return value is a (possibly null) value that holds the result
3732 as seen by the caller. */
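/* A hedged illustration of the shapes handled below (caller-side pseudo-C,
   names are made up):

     struct big b = make_big ();   // CALL_EXPR_RETURN_SLOT_OPT: RETURN_SLOT is `b'
     x = add (y, z);               // MODIFY_DEST is `x'
     add (y, z);                   // neither: the result of the call is dropped

   The value returned from this function is what the caller should use in
   place of the call's result, or NULL when there is nothing to use.  */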
3734 static tree
3735 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3736 basic_block entry_bb)
3738 tree callee = id->src_fn;
3739 tree result = DECL_RESULT (callee);
3740 tree callee_type = TREE_TYPE (result);
3741 tree caller_type;
3742 tree var, use;
3744 /* Handle type-mismatches in the function declaration return type
3745 vs. the call expression. */
3746 if (modify_dest)
3747 caller_type = TREE_TYPE (modify_dest);
3748 else if (return_slot)
3749 caller_type = TREE_TYPE (return_slot);
3750 else /* No LHS on the call. */
3751 caller_type = TREE_TYPE (TREE_TYPE (callee));
3753 /* We don't need to do anything for functions that don't return anything. */
3754 if (VOID_TYPE_P (callee_type))
3755 return NULL_TREE;
3757 /* If there was a return slot, then the return value is the
3758 dereferenced address of that object. */
3759 if (return_slot)
3761 /* The front end shouldn't have used both return_slot and
3762 a modify expression. */
3763 gcc_assert (!modify_dest);
3764 if (DECL_BY_REFERENCE (result))
3766 tree return_slot_addr = build_fold_addr_expr (return_slot);
3767 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3769 /* We are going to construct *&return_slot and we can't do that
3770 for variables believed to be not addressable.
3772 FIXME: This check can possibly trigger, because values returned
3773 via the return slot optimization are not believed to have their
3774 address taken by alias analysis. */
3775 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3776 var = return_slot_addr;
3777 mark_addressable (return_slot);
3779 else
3781 var = return_slot;
3782 gcc_assert (TREE_CODE (var) != SSA_NAME);
3783 if (TREE_ADDRESSABLE (result))
3784 mark_addressable (var);
3786 if (DECL_NOT_GIMPLE_REG_P (result)
3787 && DECL_P (var))
3788 DECL_NOT_GIMPLE_REG_P (var) = 1;
3790 if (!useless_type_conversion_p (callee_type, caller_type))
3791 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3793 use = NULL;
3794 goto done;
3797 /* All types requiring non-trivial constructors should have been handled. */
3798 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3800 /* Attempt to avoid creating a new temporary variable. */
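/* Hedged example of the decision below (hypothetical caller code): for

     struct pair { int a, b; } p;   // local aggregate, address never taken
     ...
     p = callee ();

   MODIFY_DEST is `p'; because its base is a non-addressable local decl that
   the callee cannot otherwise reach, the inlined body may store straight
   into `p' instead of going through a temporary plus a copy.  */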
3801 if (modify_dest
3802 && TREE_CODE (modify_dest) != SSA_NAME)
3804 bool use_it = false;
3806 /* We can't use MODIFY_DEST if there's type promotion involved. */
3807 if (!useless_type_conversion_p (callee_type, caller_type))
3808 use_it = false;
3810 /* ??? If we're assigning to a variable sized type, then we must
3811 reuse the destination variable, because we've no good way to
3812 create variable sized temporaries at this point. */
3813 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3814 use_it = true;
3816 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3817 reuse it as the result of the call directly. Don't do this if
3818 it would promote MODIFY_DEST to addressable. */
3819 else if (TREE_ADDRESSABLE (result))
3820 use_it = false;
3821 else
3823 tree base_m = get_base_address (modify_dest);
3825 /* If the base isn't a decl, then it's a pointer, and we don't
3826 know where that's going to go. */
3827 if (!DECL_P (base_m))
3828 use_it = false;
3829 else if (is_global_var (base_m))
3830 use_it = false;
3831 else if (DECL_NOT_GIMPLE_REG_P (result)
3832 && !DECL_NOT_GIMPLE_REG_P (base_m))
3833 use_it = false;
3834 else if (!TREE_ADDRESSABLE (base_m))
3835 use_it = true;
3838 if (use_it)
3840 var = modify_dest;
3841 use = NULL;
3842 goto done;
3846 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3848 var = copy_result_decl_to_var (result, id);
3849 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3851 /* Do not have the rest of GCC warn about this variable as it should
3852 not be visible to the user. */
3853 suppress_warning (var /* OPT_Wuninitialized? */);
3855 declare_inline_vars (id->block, var);
3857 /* Build the use expr. If the return type of the function was
3858 promoted, convert it back to the expected type. */
3859 use = var;
3860 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3862 /* If we can match up types by promotion/demotion do so. */
3863 if (fold_convertible_p (caller_type, var))
3864 use = fold_convert (caller_type, var);
3865 else
3867 /* ??? For valid programs we should not end up here.
3868 Still if we end up with truly mismatched types here, fall back
3869 to using a MEM_REF to not leak invalid GIMPLE to the following
3870 passes. */
3871 /* Prevent var from being written into SSA form. */
3872 if (is_gimple_reg_type (TREE_TYPE (var)))
3873 DECL_NOT_GIMPLE_REG_P (var) = true;
3874 use = fold_build2 (MEM_REF, caller_type,
3875 build_fold_addr_expr (var),
3876 build_int_cst (ptr_type_node, 0));
3880 STRIP_USELESS_TYPE_CONVERSION (use);
3882 if (DECL_BY_REFERENCE (result))
3884 TREE_ADDRESSABLE (var) = 1;
3885 var = build_fold_addr_expr (var);
3888 done:
3889 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3890 way, when the RESULT_DECL is encountered, it will be
3891 automatically replaced by the VAR_DECL.
3893 When returning by reference, ensure that RESULT_DECL remaps to
3894 gimple_val. */
3895 if (DECL_BY_REFERENCE (result)
3896 && !is_gimple_val (var))
3898 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3899 insert_decl_map (id, result, temp);
3900 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3901 its default_def SSA_NAME. */
3902 if (gimple_in_ssa_p (id->src_cfun)
3903 && is_gimple_reg (result))
3904 if (tree default_def = ssa_default_def (id->src_cfun, result))
3906 temp = make_ssa_name (temp);
3907 insert_decl_map (id, default_def, temp);
3909 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3911 else
3912 insert_decl_map (id, result, var);
3914 /* Remember this so we can ignore it in remap_decls. */
3915 id->retvar = var;
3916 return use;
3919 /* Determine if the function can be copied. If so return NULL. If
3920 not return a string describing the reason for failure. */
3922 const char *
3923 copy_forbidden (struct function *fun)
3925 const char *reason = fun->cannot_be_copied_reason;
3927 /* Only examine the function once. */
3928 if (fun->cannot_be_copied_set)
3929 return reason;
3931 /* We cannot copy a function that receives a non-local goto
3932 because we cannot remap the destination label used in the
3933 function that is performing the non-local goto. */
3934 /* ??? Actually, this should be possible, if we work at it.
3935 No doubt there's just a handful of places that simply
3936 assume it doesn't happen and don't substitute properly. */
3937 if (fun->has_nonlocal_label)
3939 reason = G_("function %q+F can never be copied "
3940 "because it receives a non-local goto");
3941 goto fail;
3944 if (fun->has_forced_label_in_static)
3946 reason = G_("function %q+F can never be copied because it saves "
3947 "address of local label in a static variable");
3948 goto fail;
3951 fail:
3952 fun->cannot_be_copied_reason = reason;
3953 fun->cannot_be_copied_set = true;
3954 return reason;
3958 static const char *inline_forbidden_reason;
3960 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3961 iff a function cannot be inlined. Also sets the reason why. */
3963 static tree
3964 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3965 struct walk_stmt_info *wip)
3967 tree fn = (tree) wip->info;
3968 tree t;
3969 gimple *stmt = gsi_stmt (*gsi);
3971 switch (gimple_code (stmt))
3973 case GIMPLE_CALL:
3974 /* Refuse to inline an alloca call unless the user explicitly forced
3975 it, as this may change the program's memory overhead drastically
3976 when the function using alloca is called in a loop. In the GCC
3977 sources present in SPEC2000, inlining into schedule_block caused
3978 it to require 2GB of RAM instead of 256MB. Don't do so for alloca
3979 calls emitted for VLA objects, as those can't cause unbounded growth
3980 (they're always wrapped inside stack_save/stack_restore regions). */
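/* Hedged illustration (assumed user code): a callee such as

     static int
     scratch_sum (int n, const int *v)
     {
       int *buf = __builtin_alloca (n * sizeof (int));
       ...
     }

   is rejected here unless it is marked always_inline; inlining it into a
   caller's loop would keep every iteration's alloca block live until the
   caller returns.  */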
3981 if (gimple_maybe_alloca_call_p (stmt)
3982 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3983 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3985 inline_forbidden_reason
3986 = G_("function %q+F can never be inlined because it uses "
3987 "alloca (override using the always_inline attribute)");
3988 *handled_ops_p = true;
3989 return fn;
3992 t = gimple_call_fndecl (stmt);
3993 if (t == NULL_TREE)
3994 break;
3996 /* We cannot inline functions that call setjmp. */
3997 if (setjmp_call_p (t))
3999 inline_forbidden_reason
4000 = G_("function %q+F can never be inlined because it uses setjmp");
4001 *handled_ops_p = true;
4002 return t;
4005 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
4006 switch (DECL_FUNCTION_CODE (t))
4008 /* We cannot inline functions that take a variable number of
4009 arguments. */
4010 case BUILT_IN_VA_START:
4011 case BUILT_IN_NEXT_ARG:
4012 case BUILT_IN_VA_END:
4013 inline_forbidden_reason
4014 = G_("function %q+F can never be inlined because it "
4015 "uses variable argument lists");
4016 *handled_ops_p = true;
4017 return t;
4019 case BUILT_IN_LONGJMP:
4020 /* We can't inline functions that call __builtin_longjmp at
4021 all. The non-local goto machinery really requires the
4022 destination be in a different function. If we allow the
4023 function calling __builtin_longjmp to be inlined into the
4024 function calling __builtin_setjmp, Things will Go Awry. */
4025 inline_forbidden_reason
4026 = G_("function %q+F can never be inlined because "
4027 "it uses setjmp-longjmp exception handling");
4028 *handled_ops_p = true;
4029 return t;
4031 case BUILT_IN_NONLOCAL_GOTO:
4032 /* Similarly. */
4033 inline_forbidden_reason
4034 = G_("function %q+F can never be inlined because "
4035 "it uses non-local goto");
4036 *handled_ops_p = true;
4037 return t;
4039 case BUILT_IN_RETURN:
4040 case BUILT_IN_APPLY_ARGS:
4041 /* If a __builtin_apply_args caller would be inlined,
4042 it would be saving arguments of the function it has
4043 been inlined into. Similarly __builtin_return would
4044 return from the function the inlined code has been inlined into. */
4045 inline_forbidden_reason
4046 = G_("function %q+F can never be inlined because "
4047 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
4048 *handled_ops_p = true;
4049 return t;
4051 default:
4052 break;
4054 break;
4056 case GIMPLE_GOTO:
4057 t = gimple_goto_dest (stmt);
4059 /* We will not inline a function which uses computed goto. The
4060 addresses of its local labels, which may be tucked into
4061 global storage, are of course not constant across
4062 instantiations, which causes unexpected behavior. */
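/* Hedged illustration (GNU C, assumed user code, in the spirit of the
   labels-as-values extension):

     static void *tab[] = { &&lab_a, &&lab_b };   // label addresses escape
     goto *tab[i];                                 // computed goto
   lab_a: ...
   lab_b: ...

   Duplicating such a body for inlining would leave the stored label
   addresses pointing into the original instantiation.  */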
4063 if (TREE_CODE (t) != LABEL_DECL)
4065 inline_forbidden_reason
4066 = G_("function %q+F can never be inlined "
4067 "because it contains a computed goto");
4068 *handled_ops_p = true;
4069 return t;
4071 break;
4073 default:
4074 break;
4077 *handled_ops_p = false;
4078 return NULL_TREE;
4081 /* Return true if FNDECL is a function that cannot be inlined into
4082 another one. */
4084 static bool
4085 inline_forbidden_p (tree fndecl)
4087 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4088 struct walk_stmt_info wi;
4089 basic_block bb;
4090 bool forbidden_p = false;
4092 /* First check for shared reasons not to copy the code. */
4093 inline_forbidden_reason = copy_forbidden (fun);
4094 if (inline_forbidden_reason != NULL)
4095 return true;
4097 /* Next, walk the statements of the function looking for
4098 constructs we can't handle, or are non-optimal for inlining. */
4099 hash_set<tree> visited_nodes;
4100 memset (&wi, 0, sizeof (wi));
4101 wi.info = (void *) fndecl;
4102 wi.pset = &visited_nodes;
4104 /* We cannot inline a function with a variable-sized parameter because we
4105 cannot materialize a temporary of such a type in the caller if need be.
4106 Note that the return case is not symmetrical because we can guarantee
4107 that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT. */
4108 for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4109 if (!poly_int_tree_p (DECL_SIZE (parm)))
4111 inline_forbidden_reason
4112 = G_("function %q+F can never be inlined because "
4113 "it has a VLA argument");
4114 return true;
4117 FOR_EACH_BB_FN (bb, fun)
4119 gimple *ret;
4120 gimple_seq seq = bb_seq (bb);
4121 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4122 forbidden_p = (ret != NULL);
4123 if (forbidden_p)
4124 break;
4127 return forbidden_p;
4130 /* Return false if the function FNDECL cannot be inlined on account of its
4131 attributes, true otherwise. */
4132 static bool
4133 function_attribute_inlinable_p (const_tree fndecl)
4135 for (auto scoped_attributes : targetm.attribute_table)
4137 const_tree a;
4139 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4141 const_tree name = get_attribute_name (a);
4143 for (const attribute_spec &attribute : scoped_attributes->attributes)
4144 if (is_attribute_p (attribute.name, name))
4145 return targetm.function_attribute_inlinable_p (fndecl);
4149 return true;
4152 /* Returns nonzero if FN is a function that does not have any
4153 fundamental inline blocking properties. */
4155 bool
4156 tree_inlinable_function_p (tree fn)
4158 bool inlinable = true;
4159 bool do_warning;
4160 tree always_inline;
4162 /* If we've already decided this function shouldn't be inlined,
4163 there's no need to check again. */
4164 if (DECL_UNINLINABLE (fn))
4165 return false;
4167 /* We only warn for functions declared `inline' by the user. */
4168 do_warning = (opt_for_fn (fn, warn_inline)
4169 && DECL_DECLARED_INLINE_P (fn)
4170 && !DECL_NO_INLINE_WARNING_P (fn)
4171 && !DECL_IN_SYSTEM_HEADER (fn));
4173 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4175 if (flag_no_inline
4176 && always_inline == NULL)
4178 if (do_warning)
4179 warning (OPT_Winline, "function %q+F can never be inlined because it "
4180 "is suppressed using %<-fno-inline%>", fn);
4181 inlinable = false;
4184 else if (!function_attribute_inlinable_p (fn))
4186 if (do_warning)
4187 warning (OPT_Winline, "function %q+F can never be inlined because it "
4188 "uses attributes conflicting with inlining", fn);
4189 inlinable = false;
4192 else if (inline_forbidden_p (fn))
4194 /* See if we should warn about uninlinable functions. Previously,
4195 some of these warnings would be issued while trying to expand
4196 the function inline, but that would cause multiple warnings
4197 about functions that would for example call alloca. But since
4198 this is a property of the function, just one warning is enough.
4199 As a bonus we can now give more details about the reason why a
4200 function is not inlinable. */
4201 if (always_inline)
4202 error (inline_forbidden_reason, fn);
4203 else if (do_warning)
4204 warning (OPT_Winline, inline_forbidden_reason, fn);
4206 inlinable = false;
4209 /* Squirrel away the result so that we don't have to check again. */
4210 DECL_UNINLINABLE (fn) = !inlinable;
4212 return inlinable;
4215 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4216 word size, take a possible memcpy call into account, and return the
4217 cost based on whether we optimize for size or speed according to SPEED_P. */
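/* Worked example under assumed target numbers (MOVE_MAX_PIECES == 16,
   MOVE_RATIO (speed_p) == 8, so the memcpy threshold is 128 bytes):
   a 40-byte struct costs (40 + 16 - 1) / 16 == 3 piecewise moves, while a
   200-byte struct exceeds the threshold and is costed as a memcpy call, 4.  */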
4220 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4222 HOST_WIDE_INT size;
4224 gcc_assert (!VOID_TYPE_P (type));
4226 if (VECTOR_TYPE_P (type))
4228 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4229 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4230 int orig_mode_size
4231 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4232 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4233 return ((orig_mode_size + simd_mode_size - 1)
4234 / simd_mode_size);
4237 size = int_size_in_bytes (type);
4239 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4240 /* Cost of a memcpy call, 3 arguments and the call. */
4241 return 4;
4242 else
4243 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4246 /* Returns the cost of operation CODE, according to WEIGHTS. */
4248 static int
4249 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4250 tree op1 ATTRIBUTE_UNUSED, tree op2)
4252 switch (code)
4254 /* These are "free" conversions, or their presumed cost
4255 is folded into other operations. */
4256 case RANGE_EXPR:
4257 CASE_CONVERT:
4258 case COMPLEX_EXPR:
4259 case PAREN_EXPR:
4260 case VIEW_CONVERT_EXPR:
4261 return 0;
4263 /* Assign cost of 1 to usual operations.
4264 ??? We may consider mapping RTL costs to this. */
4265 case COND_EXPR:
4266 case VEC_COND_EXPR:
4267 case VEC_PERM_EXPR:
4269 case PLUS_EXPR:
4270 case POINTER_PLUS_EXPR:
4271 case POINTER_DIFF_EXPR:
4272 case MINUS_EXPR:
4273 case MULT_EXPR:
4274 case MULT_HIGHPART_EXPR:
4276 case ADDR_SPACE_CONVERT_EXPR:
4277 case FIXED_CONVERT_EXPR:
4278 case FIX_TRUNC_EXPR:
4280 case NEGATE_EXPR:
4281 case FLOAT_EXPR:
4282 case MIN_EXPR:
4283 case MAX_EXPR:
4284 case ABS_EXPR:
4285 case ABSU_EXPR:
4287 case LSHIFT_EXPR:
4288 case RSHIFT_EXPR:
4289 case LROTATE_EXPR:
4290 case RROTATE_EXPR:
4292 case BIT_IOR_EXPR:
4293 case BIT_XOR_EXPR:
4294 case BIT_AND_EXPR:
4295 case BIT_NOT_EXPR:
4297 case TRUTH_ANDIF_EXPR:
4298 case TRUTH_ORIF_EXPR:
4299 case TRUTH_AND_EXPR:
4300 case TRUTH_OR_EXPR:
4301 case TRUTH_XOR_EXPR:
4302 case TRUTH_NOT_EXPR:
4304 case LT_EXPR:
4305 case LE_EXPR:
4306 case GT_EXPR:
4307 case GE_EXPR:
4308 case EQ_EXPR:
4309 case NE_EXPR:
4310 case ORDERED_EXPR:
4311 case UNORDERED_EXPR:
4313 case UNLT_EXPR:
4314 case UNLE_EXPR:
4315 case UNGT_EXPR:
4316 case UNGE_EXPR:
4317 case UNEQ_EXPR:
4318 case LTGT_EXPR:
4320 case CONJ_EXPR:
4322 case PREDECREMENT_EXPR:
4323 case PREINCREMENT_EXPR:
4324 case POSTDECREMENT_EXPR:
4325 case POSTINCREMENT_EXPR:
4327 case REALIGN_LOAD_EXPR:
4329 case WIDEN_SUM_EXPR:
4330 case WIDEN_MULT_EXPR:
4331 case DOT_PROD_EXPR:
4332 case SAD_EXPR:
4333 case WIDEN_MULT_PLUS_EXPR:
4334 case WIDEN_MULT_MINUS_EXPR:
4335 case WIDEN_LSHIFT_EXPR:
4337 case VEC_WIDEN_MULT_HI_EXPR:
4338 case VEC_WIDEN_MULT_LO_EXPR:
4339 case VEC_WIDEN_MULT_EVEN_EXPR:
4340 case VEC_WIDEN_MULT_ODD_EXPR:
4341 case VEC_UNPACK_HI_EXPR:
4342 case VEC_UNPACK_LO_EXPR:
4343 case VEC_UNPACK_FLOAT_HI_EXPR:
4344 case VEC_UNPACK_FLOAT_LO_EXPR:
4345 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4346 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4347 case VEC_PACK_TRUNC_EXPR:
4348 case VEC_PACK_SAT_EXPR:
4349 case VEC_PACK_FIX_TRUNC_EXPR:
4350 case VEC_PACK_FLOAT_EXPR:
4351 case VEC_WIDEN_LSHIFT_HI_EXPR:
4352 case VEC_WIDEN_LSHIFT_LO_EXPR:
4353 case VEC_DUPLICATE_EXPR:
4354 case VEC_SERIES_EXPR:
4356 return 1;
4358 /* A few special cases of expensive operations. This is useful
4359 to avoid inlining functions having too many of these. */
4360 case TRUNC_DIV_EXPR:
4361 case CEIL_DIV_EXPR:
4362 case FLOOR_DIV_EXPR:
4363 case ROUND_DIV_EXPR:
4364 case EXACT_DIV_EXPR:
4365 case TRUNC_MOD_EXPR:
4366 case CEIL_MOD_EXPR:
4367 case FLOOR_MOD_EXPR:
4368 case ROUND_MOD_EXPR:
4369 case RDIV_EXPR:
4370 if (TREE_CODE (op2) != INTEGER_CST)
4371 return weights->div_mod_cost;
4372 return 1;
4374 /* Bit-field insertion needs several shift and mask operations. */
4375 case BIT_INSERT_EXPR:
4376 return 3;
4378 default:
4379 /* We expect a copy assignment with no operator. */
4380 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4381 return 0;
4386 /* Estimate number of instructions that will be created by expanding
4387 the statements in the statement sequence STMTS.
4388 WEIGHTS contains weights attributed to various constructs. */
4391 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4393 int cost;
4394 gimple_stmt_iterator gsi;
4396 cost = 0;
4397 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4398 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4400 return cost;
4404 /* Estimate number of instructions that will be created by expanding STMT.
4405 WEIGHTS contains weights attributed to various constructs. */
4408 estimate_num_insns (gimple *stmt, eni_weights *weights)
4410 unsigned cost, i;
4411 enum gimple_code code = gimple_code (stmt);
4412 tree lhs;
4413 tree rhs;
4415 switch (code)
4417 case GIMPLE_ASSIGN:
4418 /* Try to estimate the cost of assignments. We have two cases to
4419 deal with:
4420 1) Simple assignments to registers;
4421 2) Stores to things that must live in memory. This includes
4422 "normal" stores to scalars, but also assignments of large
4423 structures, or constructors of big arrays;
4425 Let us look at these two cases, assuming we have "a = b + C":
4426 <GIMPLE_ASSIGN <var_decl "a">
4427 <plus_expr <var_decl "b"> <constant C>>
4428 If "a" is a GIMPLE register, the assignment to it is free on almost
4429 any target, because "a" usually ends up in a real register. Hence
4430 the only cost of this expression comes from the PLUS_EXPR, and we
4431 can ignore the GIMPLE_ASSIGN.
4432 If "a" is not a GIMPLE register, the assignment to "a" will most
4433 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4434 of moving something into "a", which we compute using the function
4435 estimate_move_cost. */
4436 if (gimple_clobber_p (stmt))
4437 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4439 lhs = gimple_assign_lhs (stmt);
4440 rhs = gimple_assign_rhs1 (stmt);
4442 cost = 0;
4444 /* Account for the cost of moving to / from memory. */
4445 if (gimple_store_p (stmt))
4446 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4447 if (gimple_assign_load_p (stmt))
4448 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4450 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4451 gimple_assign_rhs1 (stmt),
4452 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4453 == GIMPLE_BINARY_RHS
4454 ? gimple_assign_rhs2 (stmt) : NULL);
4455 break;
4457 case GIMPLE_COND:
4458 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4459 gimple_op (stmt, 0),
4460 gimple_op (stmt, 1));
4461 break;
4463 case GIMPLE_SWITCH:
4465 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4466 /* Take into account the cost of the switch + guess 2 conditional jumps for
4467 each case label.
4469 TODO: once the switch expansion logic is sufficiently separated, we can
4470 do a better job of estimating the cost of the switch. */
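/* For example, a switch with 16 labels (counting the default) is costed
   below as floor_log2 (16) * 2 == 8 when estimating time, and as
   16 * 2 == 32 when estimating size.  */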
4471 if (weights->time_based)
4472 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4473 else
4474 cost = gimple_switch_num_labels (switch_stmt) * 2;
4476 break;
4478 case GIMPLE_CALL:
4480 tree decl;
4482 if (gimple_call_internal_p (stmt))
4483 return 0;
4484 else if ((decl = gimple_call_fndecl (stmt))
4485 && fndecl_built_in_p (decl))
4487 /* Do not special-case builtins where we see the body.
4488 This just confuses the inliner. */
4489 struct cgraph_node *node;
4490 if ((node = cgraph_node::get (decl))
4491 && node->definition)
4493 /* For builtins that are likely expanded to nothing or
4494 inlined, do not account operand costs. */
4495 else if (is_simple_builtin (decl))
4496 return 0;
4497 else if (is_inexpensive_builtin (decl))
4498 return weights->target_builtin_call_cost;
4499 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4501 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4502 specialize the cheap expansion we do here.
4503 ??? This asks for a more general solution. */
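/* Illustration: with -ffast-math the middle end may canonicalize x * x
   into pow (x, 2.0), so a call like __builtin_pow (a, 2.0) is costed below
   as a single MULT_EXPR of its first argument rather than as a full call.  */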
4504 switch (DECL_FUNCTION_CODE (decl))
4506 case BUILT_IN_POW:
4507 case BUILT_IN_POWF:
4508 case BUILT_IN_POWL:
4509 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4510 && (real_equal
4511 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4512 &dconst2)))
4513 return estimate_operator_cost
4514 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4515 gimple_call_arg (stmt, 0));
4516 break;
4518 default:
4519 break;
4524 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4525 if (gimple_call_lhs (stmt))
4526 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4527 weights->time_based);
4528 for (i = 0; i < gimple_call_num_args (stmt); i++)
4530 tree arg = gimple_call_arg (stmt, i);
4531 cost += estimate_move_cost (TREE_TYPE (arg),
4532 weights->time_based);
4534 break;
4537 case GIMPLE_RETURN:
4538 return weights->return_cost;
4540 case GIMPLE_GOTO:
4541 case GIMPLE_LABEL:
4542 case GIMPLE_NOP:
4543 case GIMPLE_PHI:
4544 case GIMPLE_PREDICT:
4545 case GIMPLE_DEBUG:
4546 return 0;
4548 case GIMPLE_ASM:
4550 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4551 /* 1000 means infinity. This avoids overflows later
4552 with very long asm statements. */
4553 if (count > 1000)
4554 count = 1000;
4555 /* If this asm is asm inline, count anything as minimum size. */
4556 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4557 count = MIN (1, count);
4558 return MAX (1, count);
4561 case GIMPLE_RESX:
4562 /* This is either going to be an external function call with one
4563 argument, or two register copy statements plus a goto. */
4564 return 2;
4566 case GIMPLE_EH_DISPATCH:
4567 /* ??? This is going to turn into a switch statement. Ideally
4568 we'd have a look at the eh region and estimate the number of
4569 edges involved. */
4570 return 10;
4572 case GIMPLE_BIND:
4573 return estimate_num_insns_seq (
4574 gimple_bind_body (as_a <gbind *> (stmt)),
4575 weights);
4577 case GIMPLE_EH_FILTER:
4578 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4580 case GIMPLE_CATCH:
4581 return estimate_num_insns_seq (gimple_catch_handler (
4582 as_a <gcatch *> (stmt)),
4583 weights);
4585 case GIMPLE_TRY:
4586 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4587 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4589 /* OMP directives are generally very expensive. */
4591 case GIMPLE_OMP_RETURN:
4592 case GIMPLE_OMP_SECTIONS_SWITCH:
4593 case GIMPLE_OMP_ATOMIC_STORE:
4594 case GIMPLE_OMP_CONTINUE:
4595 /* ...except these, which are cheap. */
4596 return 0;
4598 case GIMPLE_OMP_ATOMIC_LOAD:
4599 return weights->omp_cost;
4601 case GIMPLE_OMP_FOR:
4602 return (weights->omp_cost
4603 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4604 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4606 case GIMPLE_OMP_PARALLEL:
4607 case GIMPLE_OMP_TASK:
4608 case GIMPLE_OMP_CRITICAL:
4609 case GIMPLE_OMP_MASTER:
4610 case GIMPLE_OMP_MASKED:
4611 case GIMPLE_OMP_SCOPE:
4612 case GIMPLE_OMP_TASKGROUP:
4613 case GIMPLE_OMP_ORDERED:
4614 case GIMPLE_OMP_SCAN:
4615 case GIMPLE_OMP_SECTION:
4616 case GIMPLE_OMP_SECTIONS:
4617 case GIMPLE_OMP_STRUCTURED_BLOCK:
4618 case GIMPLE_OMP_SINGLE:
4619 case GIMPLE_OMP_TARGET:
4620 case GIMPLE_OMP_TEAMS:
4621 return (weights->omp_cost
4622 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4624 case GIMPLE_TRANSACTION:
4625 return (weights->tm_cost
4626 + estimate_num_insns_seq (gimple_transaction_body (
4627 as_a <gtransaction *> (stmt)),
4628 weights));
4630 default:
4631 gcc_unreachable ();
4634 return cost;
4637 /* Estimate number of instructions that will be created by expanding
4638 function FNDECL. WEIGHTS contains weights attributed to various
4639 constructs. */
4642 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4644 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4645 gimple_stmt_iterator bsi;
4646 basic_block bb;
4647 int n = 0;
4649 gcc_assert (my_function && my_function->cfg);
4650 FOR_EACH_BB_FN (bb, my_function)
4652 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4653 n += estimate_num_insns (gsi_stmt (bsi), weights);
4656 return n;
4660 /* Initializes weights used by estimate_num_insns. */
4662 void
4663 init_inline_once (void)
4665 eni_size_weights.call_cost = 1;
4666 eni_size_weights.indirect_call_cost = 3;
4667 eni_size_weights.target_builtin_call_cost = 1;
4668 eni_size_weights.div_mod_cost = 1;
4669 eni_size_weights.omp_cost = 40;
4670 eni_size_weights.tm_cost = 10;
4671 eni_size_weights.time_based = false;
4672 eni_size_weights.return_cost = 1;
4674 /* Estimating time for call is difficult, since we have no idea what the
4675 called function does. In the current uses of eni_time_weights,
4676 underestimating the cost does less harm than overestimating it, so
4677 we choose a rather small value here. */
4678 eni_time_weights.call_cost = 10;
4679 eni_time_weights.indirect_call_cost = 15;
4680 eni_time_weights.target_builtin_call_cost = 1;
4681 eni_time_weights.div_mod_cost = 10;
4682 eni_time_weights.omp_cost = 40;
4683 eni_time_weights.tm_cost = 40;
4684 eni_time_weights.time_based = true;
4685 eni_time_weights.return_cost = 2;
4689 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4691 static void
4692 prepend_lexical_block (tree current_block, tree new_block)
4694 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4695 BLOCK_SUBBLOCKS (current_block) = new_block;
4696 BLOCK_SUPERCONTEXT (new_block) = current_block;
4699 /* Add local variables from CALLEE to CALLER. */
4701 static inline void
4702 add_local_variables (struct function *callee, struct function *caller,
4703 copy_body_data *id)
4705 tree var;
4706 unsigned ix;
4708 FOR_EACH_LOCAL_DECL (callee, ix, var)
4709 if (!can_be_nonlocal (var, id))
4711 tree new_var = remap_decl (var, id);
4713 /* Remap debug-expressions. */
4714 if (VAR_P (new_var)
4715 && DECL_HAS_DEBUG_EXPR_P (var)
4716 && new_var != var)
4718 tree tem = DECL_DEBUG_EXPR (var);
4719 bool old_regimplify = id->regimplify;
4720 id->remapping_type_depth++;
4721 walk_tree (&tem, copy_tree_body_r, id, NULL);
4722 id->remapping_type_depth--;
4723 id->regimplify = old_regimplify;
4724 SET_DECL_DEBUG_EXPR (new_var, tem);
4725 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4727 add_local_decl (caller, new_var);
4731 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4732 have brought in or introduced any debug stmts for SRCVAR. */
4734 static inline void
4735 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4737 tree *remappedvarp = id->decl_map->get (srcvar);
4739 if (!remappedvarp)
4740 return;
4742 if (!VAR_P (*remappedvarp))
4743 return;
4745 if (*remappedvarp == id->retvar)
4746 return;
4748 tree tvar = target_for_debug_bind (*remappedvarp);
4749 if (!tvar)
4750 return;
4752 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4753 id->call_stmt);
4754 gimple_seq_add_stmt (bindings, stmt);
4757 /* For each inlined variable for which we may have debug bind stmts,
4758 add before GSI a final debug stmt resetting it, marking the end of
4759 its life, so that var-tracking knows it doesn't have to compute
4760 further locations for it. */
4762 static inline void
4763 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4765 tree var;
4766 unsigned ix;
4767 gimple_seq bindings = NULL;
4769 if (!gimple_in_ssa_p (id->src_cfun))
4770 return;
4772 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4773 return;
4775 for (var = DECL_ARGUMENTS (id->src_fn);
4776 var; var = DECL_CHAIN (var))
4777 reset_debug_binding (id, var, &bindings);
4779 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4780 reset_debug_binding (id, var, &bindings);
4782 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4785 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4787 static bool
4788 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4789 bitmap to_purge)
4791 tree use_retvar;
4792 tree fn;
4793 hash_map<tree, tree> *dst;
4794 hash_map<tree, tree> *st = NULL;
4795 tree return_slot;
4796 tree modify_dest;
4797 struct cgraph_edge *cg_edge;
4798 cgraph_inline_failed_t reason;
4799 basic_block return_block;
4800 edge e;
4801 gimple_stmt_iterator gsi, stmt_gsi;
4802 bool successfully_inlined = false;
4803 bool purge_dead_abnormal_edges;
4804 gcall *call_stmt;
4805 unsigned int prop_mask, src_properties;
4806 struct function *dst_cfun;
4807 tree simduid;
4808 use_operand_p use;
4809 gimple *simtenter_stmt = NULL;
4810 vec<tree> *simtvars_save;
4811 tree save_stack = NULL_TREE;
4813 /* The gimplifier uses input_location in too many places, such as
4814 internal_get_tmp_var (). */
4815 location_t saved_location = input_location;
4816 input_location = gimple_location (stmt);
4818 /* From here on, we're only interested in CALL_EXPRs. */
4819 call_stmt = dyn_cast <gcall *> (stmt);
4820 if (!call_stmt)
4821 goto egress;
4823 cg_edge = id->dst_node->get_edge (stmt);
4824 gcc_checking_assert (cg_edge);
4825 /* First, see if we can figure out what function is being called.
4826 If we cannot, then there is no hope of inlining the function. */
4827 if (cg_edge->indirect_unknown_callee)
4828 goto egress;
4829 fn = cg_edge->callee->decl;
4830 gcc_checking_assert (fn);
4832 /* If FN is a declaration of a function in a nested scope that was
4833 globally declared inline, we don't set its DECL_INITIAL.
4834 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4835 C++ front-end uses it for cdtors to refer to their internal
4836 declarations, that are not real functions. Fortunately those
4837 don't have trees to be saved, so we can tell by checking their
4838 gimple_body. */
4839 if (!DECL_INITIAL (fn)
4840 && DECL_ABSTRACT_ORIGIN (fn)
4841 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4842 fn = DECL_ABSTRACT_ORIGIN (fn);
4844 /* Don't try to inline functions that are not well-suited to inlining. */
4845 if (cg_edge->inline_failed)
4847 reason = cg_edge->inline_failed;
4848 /* If this call was originally indirect, we do not want to emit any
4849 inlining related warnings or sorry messages because there are no
4850 guarantees regarding those. */
4851 if (cg_edge->indirect_inlining_edge)
4852 goto egress;
4854 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4855 /* For extern inline functions that get redefined we have always
4856 silently ignored the always_inline flag. Better behavior would
4857 be to be able to keep both bodies and use the extern inline body
4858 for inlining, but we can't do that because frontends overwrite
4859 the body. */
4860 && !cg_edge->callee->redefined_extern_inline
4861 /* During early inline pass, report only when optimization is
4862 not turned on. */
4863 && (symtab->global_info_ready
4864 || !optimize
4865 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4866 /* PR 20090218-1_0.c. Body can be provided by another module. */
4867 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4869 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4870 cgraph_inline_failed_string (reason));
4871 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4872 inform (gimple_location (stmt), "called from here");
4873 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4874 inform (DECL_SOURCE_LOCATION (cfun->decl),
4875 "called from this function");
4877 else if (opt_for_fn (fn, warn_inline)
4878 && DECL_DECLARED_INLINE_P (fn)
4879 && !DECL_NO_INLINE_WARNING_P (fn)
4880 && !DECL_IN_SYSTEM_HEADER (fn)
4881 && reason != CIF_UNSPECIFIED
4882 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4883 /* Do not warn about not inlined recursive calls. */
4884 && !cg_edge->recursive_p ()
4885 /* Avoid warnings during early inline pass. */
4886 && symtab->global_info_ready)
4888 auto_diagnostic_group d;
4889 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4890 fn, _(cgraph_inline_failed_string (reason))))
4892 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4893 inform (gimple_location (stmt), "called from here");
4894 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4895 inform (DECL_SOURCE_LOCATION (cfun->decl),
4896 "called from this function");
4899 goto egress;
4901 id->src_node = cg_edge->callee;
4903 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4904 and redirect to the function being thunked. */
4905 if (id->src_node->thunk)
4907 cgraph_edge *edge;
4908 tree virtual_offset = NULL;
4909 profile_count count = cg_edge->count;
4910 tree op;
4911 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4912 thunk_info *info = thunk_info::get (id->src_node);
4914 cgraph_edge::remove (cg_edge);
4915 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4916 gimple_uid (stmt),
4917 profile_count::one (),
4918 profile_count::one (),
4919 true);
4920 edge->count = count;
4921 if (info->virtual_offset_p)
4922 virtual_offset = size_int (info->virtual_value);
4923 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4924 NULL);
4925 gsi_insert_before (&iter, gimple_build_assign (op,
4926 gimple_call_arg (stmt, 0)),
4927 GSI_NEW_STMT);
4928 gcc_assert (info->this_adjusting);
4929 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4930 virtual_offset, info->indirect_offset);
4932 gimple_call_set_arg (stmt, 0, op);
4933 gimple_call_set_fndecl (stmt, edge->callee->decl);
4934 update_stmt (stmt);
4935 id->src_node->remove ();
4936 successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4937 maybe_remove_unused_call_args (cfun, stmt);
4938 /* This used to return true even though we do fail to inline in
4939 some cases. See PR98525. */
4940 goto egress;
4942 fn = cg_edge->callee->decl;
4943 cg_edge->callee->get_untransformed_body ();
4945 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4946 cg_edge->callee->verify ();
4948 /* We will be inlining this callee. */
4949 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4951 /* Update the callers EH personality. */
4952 if (DECL_FUNCTION_PERSONALITY (fn))
4953 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4954 = DECL_FUNCTION_PERSONALITY (fn);
4956 /* Split the block before the GIMPLE_CALL. */
4957 stmt_gsi = gsi_for_stmt (stmt);
4958 gsi_prev (&stmt_gsi);
4959 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4960 bb = e->src;
4961 return_block = e->dest;
4962 remove_edge (e);
4964 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4965 been the source of abnormal edges. In this case, schedule
4966 the removal of dead abnormal edges. */
4967 gsi = gsi_start_bb (return_block);
4968 gsi_next (&gsi);
4969 purge_dead_abnormal_edges = gsi_end_p (gsi);
4971 stmt_gsi = gsi_start_bb (return_block);
4973 /* Build a block containing code to initialize the arguments, the
4974 actual inline expansion of the body, and a label for the return
4975 statements within the function to jump to. The type of the
4976 statement expression is the return type of the function call.
4977 ??? If the call does not have an associated block then we will
4978 remap all callee blocks to NULL, effectively dropping most of
4979 its debug information. This should only happen for calls to
4980 artificial decls inserted by the compiler itself. We need to
4981 either link the inlined blocks into the caller block tree or
4982 not refer to them in any way to not break GC for locations. */
4983 if (tree block = gimple_block (stmt))
4985 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION
4986 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4987 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4988 if (loc == UNKNOWN_LOCATION)
4989 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4990 if (loc == UNKNOWN_LOCATION)
4991 loc = BUILTINS_LOCATION;
4992 id->block = make_node (BLOCK);
4993 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4994 BLOCK_SOURCE_LOCATION (id->block) = loc;
4995 prepend_lexical_block (block, id->block);
4998 /* Local declarations will be replaced by their equivalents in this map. */
4999 st = id->decl_map;
5000 id->decl_map = new hash_map<tree, tree>;
5001 dst = id->debug_map;
5002 id->debug_map = NULL;
5003 if (flag_stack_reuse != SR_NONE)
5004 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
5006 /* Record the function we are about to inline. */
5007 id->src_fn = fn;
5008 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
5009 id->reset_location = DECL_IGNORED_P (fn);
5010 id->call_stmt = call_stmt;
5011 cfun->cfg->full_profile &= id->src_cfun->cfg->full_profile;
5013 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
5014 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
5015 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
5016 simtvars_save = id->dst_simt_vars;
5017 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
5018 && (simduid = bb->loop_father->simduid) != NULL_TREE
5019 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
5020 && single_imm_use (simduid, &use, &simtenter_stmt)
5021 && is_gimple_call (simtenter_stmt)
5022 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
5023 vec_alloc (id->dst_simt_vars, 0);
5024 else
5025 id->dst_simt_vars = NULL;
5027 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
5028 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
5030 /* If the src function contains an IFN_VA_ARG, then so will the dst
5031 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
5032 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
5033 src_properties = id->src_cfun->curr_properties & prop_mask;
5034 if (src_properties != prop_mask)
5035 dst_cfun->curr_properties &= src_properties | ~prop_mask;
5036 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
5037 id->dst_node->calls_declare_variant_alt
5038 |= id->src_node->calls_declare_variant_alt;
5040 gcc_assert (!id->src_cfun->after_inlining);
5042 id->entry_bb = bb;
5043 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
5045 gimple_stmt_iterator si = gsi_last_bb (bb);
5046 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
5047 NOT_TAKEN),
5048 GSI_NEW_STMT);
5050 initialize_inlined_parameters (id, stmt, fn, bb);
5051 if (debug_nonbind_markers_p && debug_inline_points && id->block
5052 && inlined_function_outer_scope_p (id->block))
5054 gimple_stmt_iterator si = gsi_last_bb (bb);
5055 gsi_insert_after (&si, gimple_build_debug_inline_entry
5056 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
5057 GSI_NEW_STMT);
5060 /* If the function to be inlined calls alloca, wrap the inlined body
5061 in between save_stack = __builtin_stack_save (); and
5062 __builtin_stack_restore (save_stack); calls. */
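/* Sketch of the intended GIMPLE shape (names are made up):

     save_stack_1 = __builtin_stack_save ();
     ... inlined body of the alloca-using callee ...
     __builtin_stack_restore (save_stack_1);

   so memory obtained by user alloca calls in the inlined body is released
   when the inlined body finishes rather than only when the caller returns.  */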
5063 if (id->src_cfun->calls_alloca && !gimple_call_noreturn_p (stmt))
5064 /* Don't do this for VLA allocations though, just for user alloca
5065 calls. */
5066 for (struct cgraph_edge *e = id->src_node->callees; e; e = e->next_callee)
5067 if (gimple_maybe_alloca_call_p (e->call_stmt)
5068 && !gimple_call_alloca_for_var_p (e->call_stmt))
5070 tree fn = builtin_decl_implicit (BUILT_IN_STACK_SAVE);
5071 gcall *call = gimple_build_call (fn, 0);
5072 save_stack = make_ssa_name (ptr_type_node);
5073 gimple_call_set_lhs (call, save_stack);
5074 gimple_stmt_iterator si = gsi_last_bb (bb);
5075 gsi_insert_after (&si, call, GSI_NEW_STMT);
5076 struct cgraph_node *dest = cgraph_node::get_create (fn);
5077 id->dst_node->create_edge (dest, call, bb->count)->inline_failed
5078 = CIF_BODY_NOT_AVAILABLE;
5079 break;
5082 if (DECL_INITIAL (fn))
5084 if (gimple_block (stmt))
5086 tree *var;
5088 prepend_lexical_block (id->block,
5089 remap_blocks (DECL_INITIAL (fn), id));
5090 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
5091 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
5092 == NULL_TREE));
5093 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
5094 otherwise in DWARF the DW_TAG_formal_parameter entries will not be
5095 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
5096 under it. The parameters can then be evaluated in the debugger,
5097 but don't show up in backtraces. */
5098 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5099 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5101 tree v = *var;
5102 *var = TREE_CHAIN (v);
5103 TREE_CHAIN (v) = BLOCK_VARS (id->block);
5104 BLOCK_VARS (id->block) = v;
5106 else
5107 var = &TREE_CHAIN (*var);
5109 else
5110 remap_blocks_to_null (DECL_INITIAL (fn), id);
5113 /* Return statements in the function body will be replaced by jumps
5114 to the RET_LABEL. */
5115 gcc_assert (DECL_INITIAL (fn));
5116 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5118 /* Find the LHS to which the result of this call is assigned. */
5119 return_slot = NULL;
5120 if (gimple_call_lhs (stmt))
5122 modify_dest = gimple_call_lhs (stmt);
5124 /* The function which we are inlining might not return a value,
5125 in which case we should issue a warning that the function
5126 does not return a value. In that case the optimizers will
5127 see that the variable to which the value is assigned was not
5128 initialized. We do not want to issue a warning about that
5129 uninitialized variable. */
5130 if (DECL_P (modify_dest))
5131 suppress_warning (modify_dest, OPT_Wuninitialized);
5133 /* If we have a return slot, we can assign it the result directly,
5134 except in the case where it is a global variable that is only
5135 written to because, the callee being permitted to read or take
5136 the address of its DECL_RESULT, this could invalidate the flag
5137 on the global variable; instead we preventively remove the store,
5138 which would have happened later if the call was not inlined. */
5139 if (gimple_call_return_slot_opt_p (call_stmt))
5141 tree base = get_base_address (modify_dest);
5143 if (VAR_P (base)
5144 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
5145 && varpool_node::get (base)->writeonly)
5146 return_slot = NULL;
5147 else
5148 return_slot = modify_dest;
5150 modify_dest = NULL;
5153 else
5154 modify_dest = NULL;
5156 /* If we are inlining a call to the C++ operator new, we don't want
5157 to use type based alias analysis on the return value. Otherwise
5158 we may get confused if the compiler sees that the inlined new
5159 function returns a pointer which was just deleted. See bug
5160 33407. */
5161 if (DECL_IS_OPERATOR_NEW_P (fn))
5163 return_slot = NULL;
5164 modify_dest = NULL;
5167 /* Declare the return variable for the function. */
5168 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5170 /* Add local vars in this inlined callee to caller. */
5171 add_local_variables (id->src_cfun, cfun, id);
5173 if (dump_enabled_p ())
5175 char buf[128];
5176 snprintf (buf, sizeof(buf), "%4.2f",
5177 cg_edge->sreal_frequency ().to_double ());
5178 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5179 call_stmt,
5180 "Inlining %C to %C with frequency %s\n",
5181 id->src_node, id->dst_node, buf);
5182 if (dump_file && (dump_flags & TDF_DETAILS))
5184 id->src_node->dump (dump_file);
5185 id->dst_node->dump (dump_file);
5189 /* This is it. Duplicate the callee body. Assume callee is
5190 pre-gimplified. Note that we must not alter the caller
5191 function in any way before this point, as this CALL_EXPR may be
5192 a self-referential call; if we're calling ourselves, we need to
5193 duplicate our body before altering anything. */
5194 copy_body (id, bb, return_block, NULL);
5196 reset_debug_bindings (id, stmt_gsi);
5198 if (flag_stack_reuse != SR_NONE)
5199 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5200 if (!TREE_THIS_VOLATILE (p))
5202 /* The value associated with P is a local temporary only if
5203 there is no value associated with P in the debug map. */
5204 tree *varp = id->decl_map->get (p);
5205 if (varp
5206 && VAR_P (*varp)
5207 && !is_gimple_reg (*varp)
5208 && !(id->debug_map && id->debug_map->get (p)))
5210 tree clobber = build_clobber (TREE_TYPE (*varp),
5211 CLOBBER_STORAGE_END);
5212 gimple *clobber_stmt;
5213 clobber_stmt = gimple_build_assign (*varp, clobber);
5214 gimple_set_location (clobber_stmt, gimple_location (stmt));
5215 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5219 if (save_stack)
5221 tree fn = builtin_decl_implicit (BUILT_IN_STACK_RESTORE);
5222 gcall *call = gimple_build_call (fn, 1, save_stack);
5223 gsi_insert_before (&stmt_gsi, call, GSI_SAME_STMT);
5224 struct cgraph_node *dest = cgraph_node::get_create (fn);
5225 id->dst_node->create_edge (dest, call,
5226 return_block->count)->inline_failed
5227 = CIF_BODY_NOT_AVAILABLE;
5230 /* Reset the escaped solution. */
5231 if (cfun->gimple_df)
5233 pt_solution_reset (&cfun->gimple_df->escaped);
5234 pt_solution_reset (&cfun->gimple_df->escaped_return);
5237 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5238 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5240 size_t nargs = gimple_call_num_args (simtenter_stmt);
5241 vec<tree> *vars = id->dst_simt_vars;
5242 auto_vec<tree> newargs (nargs + vars->length ());
5243 for (size_t i = 0; i < nargs; i++)
5244 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5245 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5247 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5248 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5250 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5251 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5252 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5253 gsi_replace (&gsi, g, false);
5255 vec_free (id->dst_simt_vars);
5256 id->dst_simt_vars = simtvars_save;
5258 /* Clean up. */
5259 if (id->debug_map)
5261 delete id->debug_map;
5262 id->debug_map = dst;
5264 delete id->decl_map;
5265 id->decl_map = st;
5267 /* Unlink the call's virtual operands before replacing it. */
5268 unlink_stmt_vdef (stmt);
5269 if (gimple_vdef (stmt)
5270 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5271 release_ssa_name (gimple_vdef (stmt));
5273 /* If the inlined function returns a result that we care about,
5274 substitute the GIMPLE_CALL with an assignment of the return
5275 variable to the LHS of the call. That is, if STMT was
5276 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5277 if (use_retvar && gimple_call_lhs (stmt))
5279 gimple *old_stmt = stmt;
5280 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5281 gimple_set_location (stmt, gimple_location (old_stmt));
5282 gsi_replace (&stmt_gsi, stmt, false);
5283 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5284 /* Append a clobber for id->retvar if easily possible. */
5285 if (flag_stack_reuse != SR_NONE
5286 && id->retvar
5287 && VAR_P (id->retvar)
5288 && id->retvar != return_slot
5289 && id->retvar != modify_dest
5290 && !TREE_THIS_VOLATILE (id->retvar)
5291 && !is_gimple_reg (id->retvar)
5292 && !stmt_ends_bb_p (stmt))
5294 tree clobber = build_clobber (TREE_TYPE (id->retvar),
5295 CLOBBER_STORAGE_END);
5296 gimple *clobber_stmt;
5297 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5298 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5299 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5302 else
5304 /* Handle the case of inlining a function with no return
5305 statement, which causes the return value to become undefined. */
5306 if (gimple_call_lhs (stmt)
5307 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5309 tree name = gimple_call_lhs (stmt);
5310 tree var = SSA_NAME_VAR (name);
5311 tree def = var ? ssa_default_def (cfun, var) : NULL;
5313 if (def)
5315 /* If the variable is used undefined, make this name
5316 undefined via a move. */
5317 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5318 gsi_replace (&stmt_gsi, stmt, true);
5320 else
5322 if (!var)
5324 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5325 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5327 /* Otherwise make this variable undefined. */
5328 gsi_remove (&stmt_gsi, true);
5329 set_ssa_default_def (cfun, var, name);
5330 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5333 /* Replace with a clobber for id->retvar. */
5334 else if (flag_stack_reuse != SR_NONE
5335 && id->retvar
5336 && VAR_P (id->retvar)
5337 && id->retvar != return_slot
5338 && id->retvar != modify_dest
5339 && !TREE_THIS_VOLATILE (id->retvar)
5340 && !is_gimple_reg (id->retvar))
5342 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5343 gimple *clobber_stmt;
5344 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5345 gimple_set_location (clobber_stmt, gimple_location (stmt));
5346 gsi_replace (&stmt_gsi, clobber_stmt, false);
5347 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5349 else
5350 gsi_remove (&stmt_gsi, true);
5353 if (purge_dead_abnormal_edges)
5354 bitmap_set_bit (to_purge, return_block->index);
5356 /* If the value of the new expression is ignored, that's OK. We
5357 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5358 the equivalent inlined version either. */
5359 if (is_gimple_assign (stmt))
5361 gcc_assert (gimple_assign_single_p (stmt)
5362 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5363 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5366 id->add_clobbers_to_eh_landing_pads = 0;
5368 /* Output the inlining info for this abstract function, since it has been
5369 inlined. If we don't do this now, we can lose the information about the
5370 variables in the function when the blocks get blown away as soon as we
5371 remove the cgraph node. */
5372 if (gimple_block (stmt))
5373 (*debug_hooks->outlining_inline_function) (fn);
5375 /* Update callgraph if needed. */
5376 cg_edge->callee->remove ();
5378 id->block = NULL_TREE;
5379 id->retvar = NULL_TREE;
5380 successfully_inlined = true;
5382 egress:
5383 input_location = saved_location;
5384 return successfully_inlined;
5387 /* Expand call statements reachable from STMT_P.
5388 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5389 in a MODIFY_EXPR. */
5391 static bool
5392 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5393 bitmap to_purge)
5395 gimple_stmt_iterator gsi;
5396 bool inlined = false;
5398 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5400 gimple *stmt = gsi_stmt (gsi);
5401 gsi_prev (&gsi);
5403 if (is_gimple_call (stmt)
5404 && !gimple_call_internal_p (stmt))
5405 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5408 return inlined;
5412 /* Walk all basic blocks created after FIRST and try to fold every statement
5413 in the STATEMENTS pointer set. */
5415 static void
5416 fold_marked_statements (int first, hash_set<gimple *> *statements)
5418 auto_bitmap to_purge;
5420 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5421 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5422 bitmap_clear (visited);
5424 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5425 while (!stack.is_empty ())
5427 /* Look at the edge on the top of the stack. */
5428 edge e = stack.pop ();
5429 basic_block dest = e->dest;
5431 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5432 || bitmap_bit_p (visited, dest->index))
5433 continue;
5435 bitmap_set_bit (visited, dest->index);
5437 if (dest->index >= first)
5438 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5439 !gsi_end_p (gsi); gsi_next (&gsi))
5441 if (!statements->contains (gsi_stmt (gsi)))
5442 continue;
5444 gimple *old_stmt = gsi_stmt (gsi);
5445 tree old_decl = (is_gimple_call (old_stmt)
5446 ? gimple_call_fndecl (old_stmt) : 0);
5447 if (old_decl && fndecl_built_in_p (old_decl))
5449 /* Folding builtins can create multiple instructions;
5450 we need to look at all of them. */

5451 gimple_stmt_iterator i2 = gsi;
5452 gsi_prev (&i2);
5453 if (fold_stmt (&gsi))
5455 gimple *new_stmt;
5456 /* If a builtin at the end of a bb folded into nothing,
5457 the following loop won't work. */
5458 if (gsi_end_p (gsi))
5460 cgraph_update_edges_for_call_stmt (old_stmt,
5461 old_decl, NULL);
5462 break;
5464 if (gsi_end_p (i2))
5465 i2 = gsi_start_bb (dest);
5466 else
5467 gsi_next (&i2);
5468 while (1)
5470 new_stmt = gsi_stmt (i2);
5471 update_stmt (new_stmt);
5472 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5473 new_stmt);
5475 if (new_stmt == gsi_stmt (gsi))
5477 /* It is okay to check only the very last of these
5478 statements. If it is a throwing statement nothing
5479 will change; if it isn't, this can remove EH edges.
5480 That would only be a problem if some intermediate
5481 statement could throw while the last one cannot;
5482 handling that would mean splitting the block,
5483 which we cannot do here and which would not
5484 pay off anyway. And as builtins
5485 probably never throw, this all
5486 is moot anyway. */
5487 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5488 new_stmt))
5489 bitmap_set_bit (to_purge, dest->index);
5490 break;
5492 gsi_next (&i2);
5496 else if (fold_stmt (&gsi))
5498 /* Re-read the statement from GSI as fold_stmt() may
5499 have changed it. */
5500 gimple *new_stmt = gsi_stmt (gsi);
5501 update_stmt (new_stmt);
5503 if (is_gimple_call (old_stmt)
5504 || is_gimple_call (new_stmt))
5505 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5506 new_stmt);
5508 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5509 bitmap_set_bit (to_purge, dest->index);
5513 if (EDGE_COUNT (dest->succs) > 0)
5515 /* Avoid warnings emitted from folding statements that
5516 became unreachable because of inlined function parameter
5517 propagation. */
5518 e = find_taken_edge (dest, NULL_TREE);
5519 if (e)
5520 stack.quick_push (e);
5521 else
5523 edge_iterator ei;
5524 FOR_EACH_EDGE (e, ei, dest->succs)
5525 stack.safe_push (e);
5530 gimple_purge_all_dead_eh_edges (to_purge);
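/* The walk above is a depth-first traversal from the entry block that visits
   each reachable block once, folds only statements recorded in STATEMENTS
   within blocks numbered >= FIRST (i.e. blocks created by inlining), and
   follows only the statically taken edge where find_taken_edge can determine
   it, so that statements made unreachable by parameter propagation are
   neither folded nor warned about.  */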
5533 /* Expand calls to inline functions in the body of FN. */
5535 unsigned int
5536 optimize_inline_calls (tree fn)
5538 copy_body_data id;
5539 basic_block bb;
5540 int last = n_basic_blocks_for_fn (cfun);
5541 bool inlined_p = false;
5543 /* Clear out ID. */
5544 memset (&id, 0, sizeof (id));
5546 id.src_node = id.dst_node = cgraph_node::get (fn);
5547 gcc_assert (id.dst_node->definition);
5548 id.dst_fn = fn;
5549 /* Or any functions that aren't finished yet. */
5550 if (current_function_decl)
5551 id.dst_fn = current_function_decl;
5553 id.copy_decl = copy_decl_maybe_to_var;
5554 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5555 id.transform_new_cfg = false;
5556 id.transform_return_to_modify = true;
5557 id.transform_parameter = true;
5558 id.statements_to_fold = new hash_set<gimple *>;
5560 push_gimplify_context ();
5562 /* We make no attempts to keep dominance info up-to-date. */
5563 free_dominance_info (CDI_DOMINATORS);
5564 free_dominance_info (CDI_POST_DOMINATORS);
5566 /* Register specific gimple functions. */
5567 gimple_register_cfg_hooks ();
5569 /* Reach the trees by walking over the CFG, and note the
5570 enclosing basic-blocks in the call edges. */
5571 /* We walk the blocks going forward, because inlined function bodies
5572 will split id->current_basic_block, and the new blocks will
5573 follow it; we'll trudge through them, processing their CALL_EXPRs
5574 along the way. */
5575 auto_bitmap to_purge;
5576 FOR_EACH_BB_FN (bb, cfun)
5577 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5579 pop_gimplify_context (NULL);
5581 if (flag_checking)
5583 struct cgraph_edge *e;
5585 id.dst_node->verify ();
5587 /* Double check that we inlined everything we are supposed to inline. */
5588 for (e = id.dst_node->callees; e; e = e->next_callee)
5589 gcc_assert (e->inline_failed);
5592 /* If we didn't inline anything into the function, there is nothing to do. */
5593 if (!inlined_p)
5595 delete id.statements_to_fold;
5596 return 0;
5599 /* Fold queued statements. */
5600 update_max_bb_count ();
5601 fold_marked_statements (last, id.statements_to_fold);
5602 delete id.statements_to_fold;
5604 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5605 We need to do this after fold_marked_statements since that may walk
5606 the SSA use-def chain. */
5607 unsigned i;
5608 bitmap_iterator bi;
5609 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5611 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5612 if (bb)
5614 gimple_purge_dead_eh_edges (bb);
5615 gimple_purge_dead_abnormal_call_edges (bb);
5619 gcc_assert (!id.debug_stmts.exists ());
5621 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5622 number_blocks (fn);
5624 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5625 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5627 if (flag_checking)
5628 id.dst_node->verify ();
5630 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5631 not possible yet - the IPA passes might make various functions not
5632 throw and they don't bother to proactively update local EH info. This is
5633 done later in the fixup_cfg pass, which also executes the verification. */
5634 return (TODO_update_ssa
5635 | TODO_cleanup_cfg
5636 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5637 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0));
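/* Usage note (a sketch; the caller shown is illustrative): the return value
   is a mask of TODO_* flags for the pass manager.  A caller such as the
   inline transform typically does something along the lines of

     unsigned todo = optimize_inline_calls (current_function_decl);

   and hands TODO back to the pass framework.  */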
5640 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5642 tree
5643 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5645 enum tree_code code = TREE_CODE (*tp);
5646 enum tree_code_class cl = TREE_CODE_CLASS (code);
5648 /* We make copies of most nodes. */
5649 if (IS_EXPR_CODE_CLASS (cl)
5650 || code == TREE_LIST
5651 || code == TREE_VEC
5652 || code == TYPE_DECL
5653 || code == OMP_CLAUSE)
5655 /* Because the chain gets clobbered when we make a copy, we save it
5656 here. */
5657 tree chain = NULL_TREE, new_tree;
5659 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5660 chain = TREE_CHAIN (*tp);
5662 /* Copy the node. */
5663 new_tree = copy_node (*tp);
5665 *tp = new_tree;
5667 /* Now, restore the chain, if appropriate. That will cause
5668 walk_tree to walk into the chain as well. */
5669 if (code == PARM_DECL
5670 || code == TREE_LIST
5671 || code == OMP_CLAUSE)
5672 TREE_CHAIN (*tp) = chain;
5674 /* For now, we don't update BLOCKs when we make copies. So, we
5675 have to nullify all BIND_EXPRs. */
5676 if (TREE_CODE (*tp) == BIND_EXPR)
5677 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5679 else if (code == CONSTRUCTOR)
5681 /* CONSTRUCTOR nodes need special handling because
5682 we need to duplicate the vector of elements. */
5683 tree new_tree;
5685 new_tree = copy_node (*tp);
5686 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5687 *tp = new_tree;
5689 else if (code == STATEMENT_LIST)
5690 /* We used to just abort on STATEMENT_LIST, but we can run into them
5691 with statement-expressions (c++/40975). */
5692 copy_statement_list (tp);
5693 else if (TREE_CODE_CLASS (code) == tcc_type)
5694 *walk_subtrees = 0;
5695 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5696 *walk_subtrees = 0;
5697 else if (TREE_CODE_CLASS (code) == tcc_constant)
5698 *walk_subtrees = 0;
5699 return NULL_TREE;
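/* Usage sketch (hypothetical caller): copy_tree_r is meant to be used as a
   walk_tree callback, e.g.

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   which deep-copies expression nodes while leaving decls, types and constants
   shared; a pointer set can be passed as the last argument to avoid
   revisiting shared subtrees.  */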
5702 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5703 information indicating to what new SAVE_EXPR this one should be mapped,
5704 use that one. Otherwise, create a new node and enter it in ST. FN is
5705 the function into which the copy will be placed. */
5707 static void
5708 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5710 tree *n;
5711 tree t;
5713 /* See if we already encountered this SAVE_EXPR. */
5714 n = st->get (*tp);
5716 /* If we didn't already remap this SAVE_EXPR, do so now. */
5717 if (!n)
5719 t = copy_node (*tp);
5721 /* Remember this SAVE_EXPR. */
5722 st->put (*tp, t);
5723 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5724 st->put (t, t);
5726 else
5728 /* We've already walked into this SAVE_EXPR; don't do it again. */
5729 *walk_subtrees = 0;
5730 t = *n;
5733 /* Replace this SAVE_EXPR with the copy. */
5734 *tp = t;
5737 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5738 label, copies the declaration and enters it in the decl map of the
5739 copy_body_data pointed to by WI->info. */
5741 static tree
5742 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5743 bool *handled_ops_p ATTRIBUTE_UNUSED,
5744 struct walk_stmt_info *wi)
5746 copy_body_data *id = (copy_body_data *) wi->info;
5747 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5749 if (stmt)
5751 tree decl = gimple_label_label (stmt);
5753 /* Copy the decl and remember the copy. */
5754 insert_decl_map (id, decl, id->copy_decl (decl, id));
5757 return NULL_TREE;
5760 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5761 struct walk_stmt_info *wi);
5763 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5764 Using the decl map of the copy_body_data pointed to by WI->info,
5765 remaps all local declarations to appropriate replacements in gimple
5766 operands. */
5768 static tree
5769 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5771 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5772 copy_body_data *id = (copy_body_data *) wi->info;
5773 hash_map<tree, tree> *st = id->decl_map;
5774 tree *n;
5775 tree expr = *tp;
5777 /* For recursive invocations this is no longer the LHS itself. */
5778 bool is_lhs = wi->is_lhs;
5779 wi->is_lhs = false;
5781 if (TREE_CODE (expr) == SSA_NAME)
5783 *tp = remap_ssa_name (*tp, id);
5784 *walk_subtrees = 0;
5785 if (is_lhs)
5786 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5788 /* Only a local declaration (variable or label). */
5789 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5790 || TREE_CODE (expr) == LABEL_DECL)
5792 /* Lookup the declaration. */
5793 n = st->get (expr);
5795 /* If it's there, remap it. */
5796 if (n)
5797 *tp = *n;
5798 *walk_subtrees = 0;
5800 else if (TREE_CODE (expr) == STATEMENT_LIST
5801 || TREE_CODE (expr) == BIND_EXPR
5802 || TREE_CODE (expr) == SAVE_EXPR)
5803 gcc_unreachable ();
5804 else if (TREE_CODE (expr) == TARGET_EXPR)
5806 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5807 It's OK for this to happen if it was part of a subtree that
5808 isn't immediately expanded, such as operand 2 of another
5809 TARGET_EXPR. */
5810 if (!TREE_OPERAND (expr, 1))
5812 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5813 TREE_OPERAND (expr, 3) = NULL_TREE;
5816 else if (TREE_CODE (expr) == OMP_CLAUSE)
5818 /* Before the omplower pass completes, some OMP clauses can contain
5819 sequences that are neither copied by gimple_seq_copy nor walked by
5820 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5821 in those situations, we have to copy and process them explicitly. */
5823 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5825 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5826 seq = duplicate_remap_omp_clause_seq (seq, wi);
5827 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5829 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5831 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5832 seq = duplicate_remap_omp_clause_seq (seq, wi);
5833 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5835 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5837 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5838 seq = duplicate_remap_omp_clause_seq (seq, wi);
5839 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5840 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5841 seq = duplicate_remap_omp_clause_seq (seq, wi);
5842 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5846 /* Keep iterating. */
5847 return NULL_TREE;
5851 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5852 Using the decl map of the copy_body_data pointed to by WI->info,
5853 remaps all local declarations to appropriate replacements in gimple
5854 statements. */
5856 static tree
5857 replace_locals_stmt (gimple_stmt_iterator *gsip,
5858 bool *handled_ops_p ATTRIBUTE_UNUSED,
5859 struct walk_stmt_info *wi)
5861 copy_body_data *id = (copy_body_data *) wi->info;
5862 gimple *gs = gsi_stmt (*gsip);
5864 if (gbind *stmt = dyn_cast <gbind *> (gs))
5866 tree block = gimple_bind_block (stmt);
5868 if (block)
5870 remap_block (&block, id);
5871 gimple_bind_set_block (stmt, block);
5874 /* This will remap a lot of the same decls again, but this should be
5875 harmless. */
5876 if (gimple_bind_vars (stmt))
5878 tree old_var, decls = gimple_bind_vars (stmt);
5880 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5881 if (!can_be_nonlocal (old_var, id)
5882 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5883 remap_decl (old_var, id);
5885 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5886 id->prevent_decl_creation_for_types = true;
5887 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5888 id->prevent_decl_creation_for_types = false;
5892 /* Keep iterating. */
5893 return NULL_TREE;
5896 /* Create a copy of SEQ and remap all decls in it. */
5898 static gimple_seq
5899 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5901 if (!seq)
5902 return NULL;
5904 /* If there are any labels in OMP sequences, they can only be referred to from
5905 within the sequence itself, so both the label marking and the remapping can be done here. */
5906 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5907 gimple_seq copy = gimple_seq_copy (seq);
5908 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5909 return copy;
5912 /* Copies everything in SEQ and replaces variables and labels local to
5913 current_function_decl. */
5915 gimple_seq
5916 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5918 copy_body_data id;
5919 struct walk_stmt_info wi;
5920 gimple_seq copy;
5922 /* There's nothing to do for NULL_TREE. */
5923 if (seq == NULL)
5924 return seq;
5926 /* Set up ID. */
5927 memset (&id, 0, sizeof (id));
5928 id.src_fn = current_function_decl;
5929 id.dst_fn = current_function_decl;
5930 id.src_cfun = cfun;
5931 id.decl_map = new hash_map<tree, tree>;
5932 id.debug_map = NULL;
5934 id.copy_decl = copy_decl_no_change;
5935 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5936 id.transform_new_cfg = false;
5937 id.transform_return_to_modify = false;
5938 id.transform_parameter = false;
5940 /* Walk the tree once to find local labels. */
5941 memset (&wi, 0, sizeof (wi));
5942 hash_set<tree> visited;
5943 wi.info = &id;
5944 wi.pset = &visited;
5945 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5947 copy = gimple_seq_copy (seq);
5949 /* Walk the copy, remapping decls. */
5950 memset (&wi, 0, sizeof (wi));
5951 wi.info = &id;
5952 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5954 /* Clean up. */
5955 delete id.decl_map;
5956 if (id.debug_map)
5957 delete id.debug_map;
5958 if (id.dependence_map)
5960 delete id.dependence_map;
5961 id.dependence_map = NULL;
5964 return copy;
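/* Usage sketch (hypothetical caller): duplicating a statement sequence so
   that the copy gets private instances of the labels and non-static locals
   declared within it is simply

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   used, for example, when OpenMP lowering needs to emit a body more than
   once.  */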
5968 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5970 static tree
5971 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5973 if (*tp == data)
5974 return (tree) data;
5975 else
5976 return NULL;
5979 DEBUG_FUNCTION bool
5980 debug_find_tree (tree top, tree search)
5982 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
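/* Debugger usage (a sketch): from gdb,

     (gdb) call debug_find_tree (top_expr, suspect)

   returns true when SUSPECT occurs somewhere below TOP_EXPR.  */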
5986 /* Declare the variables created by the inliner. Add all the variables in
5987 VARS to BLOCK. */
5989 static void
5990 declare_inline_vars (tree block, tree vars)
5992 tree t;
5993 for (t = vars; t; t = DECL_CHAIN (t))
5995 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5996 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5997 add_local_decl (cfun, t);
6000 if (block)
6001 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
6004 /* Finish copying DECL into COPY. The original DECL comes from ID->src_fn;
6005 the copy will live in ID->dst_fn. Propagates flags, records the abstract
6006 origin and sets the context of the copy. */
6008 tree
6009 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
6011 /* Don't generate debug information for the copy if we wouldn't have
6012 generated it for the original either. */
6013 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
6014 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
6016 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
6017 declaration inspired this copy. */
6018 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
6020 /* The new variable/label has no RTL, yet. */
6021 if (HAS_RTL_P (copy)
6022 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
6023 SET_DECL_RTL (copy, 0);
6024 /* For vector typed decls make sure to update DECL_MODE according
6025 to the new function context. */
6026 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
6027 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
6029 /* These args would always appear unused, if not for this. */
6030 TREE_USED (copy) = 1;
6032 /* Set the context for the new declaration. */
6033 if (!DECL_CONTEXT (decl))
6034 /* Globals stay global. */
6036 else if (DECL_CONTEXT (decl) != id->src_fn)
6037 /* Things that weren't in the scope of the function we're inlining
6038 from aren't in the scope we're inlining to, either. */
6040 else if (TREE_STATIC (decl))
6041 /* Function-scoped static variables should stay in the original
6042 function. */
6044 else
6046 /* Ordinary automatic local variables are now in the scope of the
6047 new function. */
6048 DECL_CONTEXT (copy) = id->dst_fn;
6049 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
6051 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
6052 DECL_ATTRIBUTES (copy)
6053 = tree_cons (get_identifier ("omp simt private"), NULL,
6054 DECL_ATTRIBUTES (copy));
6055 id->dst_simt_vars->safe_push (copy);
6059 return copy;
6062 /* Create a new VAR_DECL that is identical in all respects to DECL except
6063 for its TREE_CODE. DECL can be either a PARM_DECL or a RESULT_DECL. The
6064 original DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
6066 tree
6067 copy_decl_to_var (tree decl, copy_body_data *id)
6069 tree copy, type;
6071 gcc_assert (TREE_CODE (decl) == PARM_DECL
6072 || TREE_CODE (decl) == RESULT_DECL);
6074 type = TREE_TYPE (decl);
6076 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6077 VAR_DECL, DECL_NAME (decl), type);
6078 if (DECL_PT_UID_SET_P (decl))
6079 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6080 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6081 TREE_READONLY (copy) = TREE_READONLY (decl);
6082 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6083 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
6084 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
6086 return copy_decl_for_dup_finish (id, decl, copy);
6089 /* Like copy_decl_to_var, but create a return slot object instead of a
6090 pointer variable for return by invisible reference. */
6092 static tree
6093 copy_result_decl_to_var (tree decl, copy_body_data *id)
6095 tree copy, type;
6097 gcc_assert (TREE_CODE (decl) == PARM_DECL
6098 || TREE_CODE (decl) == RESULT_DECL);
6100 type = TREE_TYPE (decl);
6101 if (DECL_BY_REFERENCE (decl))
6102 type = TREE_TYPE (type);
6104 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6105 VAR_DECL, DECL_NAME (decl), type);
6106 if (DECL_PT_UID_SET_P (decl))
6107 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6108 TREE_READONLY (copy) = TREE_READONLY (decl);
6109 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6110 if (!DECL_BY_REFERENCE (decl))
6112 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6113 DECL_NOT_GIMPLE_REG_P (copy)
6114 = (DECL_NOT_GIMPLE_REG_P (decl)
6115 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
6116 mirror that to the created VAR_DECL. */
6117 || (TREE_CODE (decl) == RESULT_DECL
6118 && aggregate_value_p (decl, id->src_fn)));
6121 return copy_decl_for_dup_finish (id, decl, copy);
6124 tree
6125 copy_decl_no_change (tree decl, copy_body_data *id)
6127 tree copy;
6129 copy = copy_node (decl);
6131 /* The COPY is not abstract; it will be generated in DST_FN. */
6132 DECL_ABSTRACT_P (copy) = false;
6133 lang_hooks.dup_lang_specific_decl (copy);
6135 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6136 been taken; it's for internal bookkeeping in expand_goto_internal. */
6137 if (TREE_CODE (copy) == LABEL_DECL)
6139 TREE_ADDRESSABLE (copy) = 0;
6140 LABEL_DECL_UID (copy) = -1;
6143 return copy_decl_for_dup_finish (id, decl, copy);
6146 static tree
6147 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6149 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6150 return copy_decl_to_var (decl, id);
6151 else
6152 return copy_decl_no_change (decl, id);
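/* Note on the ID->copy_decl hooks above: optimize_inline_calls installs
   copy_decl_maybe_to_var so that PARM_DECLs and RESULT_DECLs of the inlined
   body become ordinary VAR_DECLs in the caller, whereas
   tree_function_versioning, copy_gimple_seq_and_replace_locals and the
   GENERIC/constexpr copiers below install copy_decl_no_change (or a thin
   wrapper around it).  */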
6155 /* Return a copy of the function's argument tree without any modifications. */
6157 static tree
6158 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6160 tree arg, *parg;
6161 tree new_parm = NULL;
6163 parg = &new_parm;
6164 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6166 tree new_tree = remap_decl (arg, id);
6167 if (TREE_CODE (new_tree) != PARM_DECL)
6168 new_tree = id->copy_decl (arg, id);
6169 lang_hooks.dup_lang_specific_decl (new_tree);
6170 *parg = new_tree;
6171 parg = &DECL_CHAIN (new_tree);
6173 return new_parm;
6176 /* Return a copy of the function's static chain. */
6177 static tree
6178 copy_static_chain (tree static_chain, copy_body_data * id)
6180 tree *chain_copy, *pvar;
6182 chain_copy = &static_chain;
6183 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6185 tree new_tree = remap_decl (*pvar, id);
6186 lang_hooks.dup_lang_specific_decl (new_tree);
6187 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6188 *pvar = new_tree;
6190 return static_chain;
6193 /* Return true if the function is allowed to be versioned.
6194 This is a guard for the versioning functionality. */
6196 bool
6197 tree_versionable_function_p (tree fndecl)
6199 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6200 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
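/* Versioning is vetoed either by an explicit "noclone" attribute on the
   function or by any reason recorded by copy_forbidden; callers such as the
   IPA cloning and specialization passes are expected to consult this
   predicate before creating clones.  */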
6203 /* Update clone info after duplication. */
6205 static void
6206 update_clone_info (copy_body_data * id)
6208 struct cgraph_node *this_node = id->dst_node;
6209 if (!this_node->clones)
6210 return;
6211 for (cgraph_node *node = this_node->clones; node != this_node;)
6213 /* First update replace maps to match the new body. */
6214 clone_info *info = clone_info::get (node);
6215 if (info && info->tree_map)
6217 unsigned int i;
6218 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6220 struct ipa_replace_map *replace_info;
6221 replace_info = (*info->tree_map)[i];
6222 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6226 if (node->clones)
6227 node = node->clones;
6228 else if (node->next_sibling_clone)
6229 node = node->next_sibling_clone;
6230 else
6232 while (node != id->dst_node && !node->next_sibling_clone)
6233 node = node->clone_of;
6234 if (node != id->dst_node)
6235 node = node->next_sibling_clone;
6240 /* Create a copy of a function's tree.
6241 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6242 of the original function and the new copied function
6243 respectively. In case we want to replace a DECL
6244 tree with another tree while duplicating the function's
6245 body, TREE_MAP represents the mapping between these
6246 trees. If UPDATE_CLONES is set, the call_stmt fields
6247 of edges of clones of the function will be updated.
6249 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6250 the function parameters and return value) should be modified.
6251 If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6252 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
6254 void
6255 tree_function_versioning (tree old_decl, tree new_decl,
6256 vec<ipa_replace_map *, va_gc> *tree_map,
6257 ipa_param_adjustments *param_adjustments,
6258 bool update_clones, bitmap blocks_to_copy,
6259 basic_block new_entry)
6261 struct cgraph_node *old_version_node;
6262 struct cgraph_node *new_version_node;
6263 copy_body_data id;
6264 tree p;
6265 unsigned i;
6266 struct ipa_replace_map *replace_info;
6267 basic_block old_entry_block, bb;
6268 auto_vec<gimple *, 10> init_stmts;
6269 tree vars = NULL_TREE;
6271 /* We can get called recursively from expand_call_inline via clone
6272 materialization. While expand_call_inline maintains input_location,
6273 we cannot allow it to leak into the materialized clone. */
6274 location_t saved_location = input_location;
6275 input_location = UNKNOWN_LOCATION;
6277 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6278 && TREE_CODE (new_decl) == FUNCTION_DECL);
6279 DECL_POSSIBLY_INLINED (old_decl) = 1;
6281 old_version_node = cgraph_node::get (old_decl);
6282 gcc_checking_assert (old_version_node);
6283 new_version_node = cgraph_node::get (new_decl);
6284 gcc_checking_assert (new_version_node);
6286 /* Copy over debug args. */
6287 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6289 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6290 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6291 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6292 old_debug_args = decl_debug_args_lookup (old_decl);
6293 if (old_debug_args)
6295 new_debug_args = decl_debug_args_insert (new_decl);
6296 *new_debug_args = vec_safe_copy (*old_debug_args);
6300 /* Output the inlining info for this abstract function, since it has been
6301 inlined. If we don't do this now, we can lose the information about the
6302 variables in the function when the blocks get blown away as soon as we
6303 remove the cgraph node. */
6304 (*debug_hooks->outlining_inline_function) (old_decl);
6306 DECL_ARTIFICIAL (new_decl) = 1;
6307 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6308 if (DECL_ORIGIN (old_decl) == old_decl)
6309 old_version_node->used_as_abstract_origin = true;
6310 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6312 /* Prepare the data structures for the tree copy. */
6313 memset (&id, 0, sizeof (id));
6315 /* Generate a new name for the new version. */
6316 id.statements_to_fold = new hash_set<gimple *>;
6318 id.decl_map = new hash_map<tree, tree>;
6319 id.debug_map = NULL;
6320 id.src_fn = old_decl;
6321 id.dst_fn = new_decl;
6322 id.src_node = old_version_node;
6323 id.dst_node = new_version_node;
6324 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6325 id.blocks_to_copy = blocks_to_copy;
6327 id.copy_decl = copy_decl_no_change;
6328 id.transform_call_graph_edges
6329 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6330 id.transform_new_cfg = true;
6331 id.transform_return_to_modify = false;
6332 id.transform_parameter = false;
6334 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
6335 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6336 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6337 initialize_cfun (new_decl, old_decl,
6338 new_entry ? new_entry->count : old_entry_block->count);
6339 new_version_node->calls_declare_variant_alt
6340 = old_version_node->calls_declare_variant_alt;
6341 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6342 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6343 = id.src_cfun->gimple_df->ipa_pta;
6345 /* Copy the function's static chain. */
6346 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6347 if (p)
6348 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6349 = copy_static_chain (p, &id);
6351 auto_vec<int, 16> new_param_indices;
6352 clone_info *info = clone_info::get (old_version_node);
6353 ipa_param_adjustments *old_param_adjustments
6354 = info ? info->param_adjustments : NULL;
6355 if (old_param_adjustments)
6356 old_param_adjustments->get_updated_indices (&new_param_indices);
6358 /* If there's a tree_map, prepare for substitution. */
6359 if (tree_map)
6360 for (i = 0; i < tree_map->length (); i++)
6362 gimple *init;
6363 replace_info = (*tree_map)[i];
6365 int p = replace_info->parm_num;
6366 if (old_param_adjustments)
6367 p = new_param_indices[p];
6369 tree parm;
6370 for (parm = DECL_ARGUMENTS (old_decl); p;
6371 parm = DECL_CHAIN (parm))
6372 p--;
6373 gcc_assert (parm);
6374 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6375 id.src_fn, NULL, &vars);
6376 if (init)
6377 init_stmts.safe_push (init);
6380 ipa_param_body_adjustments *param_body_adjs = NULL;
6381 if (param_adjustments)
6383 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6384 new_decl, old_decl,
6385 &id, &vars, tree_map);
6386 id.param_body_adjs = param_body_adjs;
6387 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6389 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6390 DECL_ARGUMENTS (new_decl)
6391 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6393 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6394 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6396 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6398 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6399 /* Add local vars. */
6400 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6402 if (DECL_RESULT (old_decl) == NULL_TREE)
6404 else if (param_adjustments && param_adjustments->m_skip_return
6405 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6407 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6408 &id);
6409 declare_inline_vars (NULL, resdecl_repl);
6410 if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6411 resdecl_repl = build_fold_addr_expr (resdecl_repl);
6412 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6414 DECL_RESULT (new_decl)
6415 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6416 RESULT_DECL, NULL_TREE, void_type_node);
6417 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6418 DECL_IS_MALLOC (new_decl) = false;
6419 cfun->returns_struct = 0;
6420 cfun->returns_pcc_struct = 0;
6422 else
6424 tree old_name;
6425 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6426 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6427 if (gimple_in_ssa_p (id.src_cfun)
6428 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6429 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6431 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6432 insert_decl_map (&id, old_name, new_name);
6433 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6434 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6438 /* Set up the destination function's loop tree. */
6439 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6441 cfun->curr_properties &= ~PROP_loops;
6442 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6443 cfun->curr_properties |= PROP_loops;
6446 /* Copy the Function's body. */
6447 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6448 new_entry);
6450 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6451 number_blocks (new_decl);
6453 /* We want to create the BB unconditionally, so that the addition of
6454 debug stmts doesn't affect BB count, which may in the end cause
6455 codegen differences. */
6456 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6457 while (init_stmts.length ())
6458 insert_init_stmt (&id, bb, init_stmts.pop ());
6459 if (param_body_adjs)
6460 param_body_adjs->append_init_stmts (bb);
6461 update_clone_info (&id);
6463 /* Remap the nonlocal_goto_save_area, if any. */
6464 if (cfun->nonlocal_goto_save_area)
6466 struct walk_stmt_info wi;
6468 memset (&wi, 0, sizeof (wi));
6469 wi.info = &id;
6470 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6473 /* Clean up. */
6474 delete id.decl_map;
6475 if (id.debug_map)
6476 delete id.debug_map;
6477 free_dominance_info (CDI_DOMINATORS);
6478 free_dominance_info (CDI_POST_DOMINATORS);
6480 update_max_bb_count ();
6481 fold_marked_statements (0, id.statements_to_fold);
6482 delete id.statements_to_fold;
6483 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6484 if (id.dst_node->definition)
6485 cgraph_edge::rebuild_references ();
6486 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6488 calculate_dominance_info (CDI_DOMINATORS);
6489 fix_loop_structure (NULL);
6491 update_ssa (TODO_update_ssa);
6493 /* After partial cloning we need to rescale frequencies, so they are
6494 within proper range in the cloned function. */
6495 if (new_entry)
6497 struct cgraph_edge *e;
6498 rebuild_frequencies ();
6500 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6501 for (e = new_version_node->callees; e; e = e->next_callee)
6503 basic_block bb = gimple_bb (e->call_stmt);
6504 e->count = bb->count;
6506 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6508 basic_block bb = gimple_bb (e->call_stmt);
6509 e->count = bb->count;
6513 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6515 vec<tree, va_gc> **debug_args = NULL;
6516 unsigned int len = 0;
6517 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6519 for (i = 0; i < reset_len; i++)
6521 tree parm = param_body_adjs->m_reset_debug_decls[i];
6522 gcc_assert (is_gimple_reg (parm));
6523 tree ddecl;
6525 if (debug_args == NULL)
6527 debug_args = decl_debug_args_insert (new_decl);
6528 len = vec_safe_length (*debug_args);
6530 ddecl = build_debug_expr_decl (TREE_TYPE (parm));
6531 /* FIXME: Is setting the mode really necessary? */
6532 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6533 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6534 vec_safe_push (*debug_args, ddecl);
6536 if (debug_args != NULL)
6538 /* On the callee side, add
6539 DEBUG D#Y s=> parm
6540 DEBUG var => D#Y
6541 stmts to the first bb where var is a VAR_DECL created for the
6542 optimized away parameter in DECL_INITIAL block. This hints
6543 in the debug info that var (whose DECL_ORIGIN is the parm
6544 PARM_DECL) is optimized away, but could be looked up at the
6545 call site as value of D#X there. */
6546 gimple_stmt_iterator cgsi
6547 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6548 gimple *def_temp;
6549 tree var = vars;
6550 i = vec_safe_length (*debug_args);
6553 tree vexpr = NULL_TREE;
6554 i -= 2;
6555 while (var != NULL_TREE
6556 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6557 var = TREE_CHAIN (var);
6558 if (var == NULL_TREE)
6559 break;
6560 tree parm = (**debug_args)[i];
6561 if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
6562 if (tree *d
6563 = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
6564 vexpr = *d;
6565 if (!vexpr)
6567 vexpr = build_debug_expr_decl (TREE_TYPE (parm));
6568 /* FIXME: Is setting the mode really necessary? */
6569 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6571 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6572 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6573 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6574 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6576 while (i > len);
6579 delete param_body_adjs;
6580 free_dominance_info (CDI_DOMINATORS);
6581 free_dominance_info (CDI_POST_DOMINATORS);
6583 gcc_assert (!id.debug_stmts.exists ());
6584 pop_cfun ();
6585 input_location = saved_location;
6586 return;
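/* Usage sketch (hypothetical caller; the arguments shown are illustrative):
   materializing an IPA clone boils down to something along the lines of

     tree_function_versioning (original->decl, clone->decl,
                               info ? info->tree_map : NULL,
                               info ? info->param_adjustments : NULL,
                               true, NULL, NULL);

   i.e. with update_clones set, copying all blocks and keeping the original
   entry block, where INFO is the clone_info attached to the clone's cgraph
   node.  */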
6589 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6590 the callee and return the inlined body on success. */
6592 tree
6593 maybe_inline_call_in_expr (tree exp)
6595 tree fn = get_callee_fndecl (exp);
6597 /* We can only try to inline "const" functions. */
6598 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6600 call_expr_arg_iterator iter;
6601 copy_body_data id;
6602 tree param, arg, t;
6603 hash_map<tree, tree> decl_map;
6605 /* Remap the parameters. */
6606 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6607 param;
6608 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6609 decl_map.put (param, arg);
6611 memset (&id, 0, sizeof (id));
6612 id.src_fn = fn;
6613 id.dst_fn = current_function_decl;
6614 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6615 id.decl_map = &decl_map;
6617 id.copy_decl = copy_decl_no_change;
6618 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6619 id.transform_new_cfg = false;
6620 id.transform_return_to_modify = true;
6621 id.transform_parameter = true;
6623 /* Make sure not to unshare trees behind the front-end's back
6624 since front-end specific mechanisms may rely on sharing. */
6625 id.regimplify = false;
6626 id.do_not_unshare = true;
6628 /* We're not inside any EH region. */
6629 id.eh_lp_nr = 0;
6631 t = copy_tree_body (&id);
6633 /* We can only return something suitable for use in a GENERIC
6634 expression tree. */
6635 if (TREE_CODE (t) == MODIFY_EXPR)
6636 return TREE_OPERAND (t, 1);
6639 return NULL_TREE;
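/* Worked example (a sketch): for a "const" function with a saved body of the
   form  return x + 1;  and a GENERIC call  f (3), the parameter X is mapped
   directly to the argument 3, the RETURN_EXPR becomes a MODIFY_EXPR because
   transform_return_to_modify is set, and the function returns the RHS of
   that MODIFY_EXPR, i.e. the expression  3 + 1.  */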
6642 /* Duplicate a type, fields and all. */
6644 tree
6645 build_duplicate_type (tree type)
6647 struct copy_body_data id;
6649 memset (&id, 0, sizeof (id));
6650 id.src_fn = current_function_decl;
6651 id.dst_fn = current_function_decl;
6652 id.src_cfun = cfun;
6653 id.decl_map = new hash_map<tree, tree>;
6654 id.debug_map = NULL;
6655 id.copy_decl = copy_decl_no_change;
6657 type = remap_type_1 (type, &id);
6659 delete id.decl_map;
6660 if (id.debug_map)
6661 delete id.debug_map;
6663 TYPE_CANONICAL (type) = type;
6665 return type;
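/* Note: the duplicate is made its own TYPE_CANONICAL above, so canonical-type
   comparisons treat it as a type distinct from the one it was copied from.  */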
6668 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6669 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6670 evaluation. */
6672 tree
6673 copy_fn (tree fn, tree& parms, tree& result)
6675 copy_body_data id;
6676 tree param;
6677 hash_map<tree, tree> decl_map;
6679 tree *p = &parms;
6680 *p = NULL_TREE;
6682 memset (&id, 0, sizeof (id));
6683 id.src_fn = fn;
6684 id.dst_fn = current_function_decl;
6685 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6686 id.decl_map = &decl_map;
6688 id.copy_decl = [] (tree decl, copy_body_data *id)
6690 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
6691 /* Don't make copies of local types or injected enumerators;
6692 the C++ constexpr evaluator doesn't need them and they
6693 confuse modules streaming. */
6694 return decl;
6695 return copy_decl_no_change (decl, id);
6697 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6698 id.transform_new_cfg = false;
6699 id.transform_return_to_modify = false;
6700 id.transform_parameter = true;
6702 /* Make sure not to unshare trees behind the front-end's back
6703 since front-end specific mechanisms may rely on sharing. */
6704 id.regimplify = false;
6705 id.do_not_unshare = true;
6706 id.do_not_fold = true;
6708 /* We're not inside any EH region. */
6709 id.eh_lp_nr = 0;
6711 /* Remap the parameters and result and return them to the caller. */
6712 for (param = DECL_ARGUMENTS (fn);
6713 param;
6714 param = DECL_CHAIN (param))
6716 *p = remap_decl (param, &id);
6717 p = &DECL_CHAIN (*p);
6720 if (DECL_RESULT (fn))
6721 result = remap_decl (DECL_RESULT (fn), &id);
6722 else
6723 result = NULL_TREE;
6725 return copy_tree_body (&id);
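/* Usage sketch (hypothetical caller; FNDECL is illustrative): the C++
   constexpr evaluator copies a function body before evaluating a call so
   that substitutions happen on a private copy, roughly

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   after which PARMS and RESULT name the remapped PARM_DECLs and RESULT_DECL
   to bind during evaluation.  */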