/* Control flow functions for trees.
   Copyright (C) 2001-2024 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "tree-ssa-dce.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"
#include "sreal.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* OpenMP region idxs for blocks during cfg pass.  */
static vec<int> bb_to_omp_idx;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static bool gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity, true);

  /* Build a mapping of labels to their associated blocks.  */
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity, true);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_maybe_infinite_kind:
	  loop->finite_p = false;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
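
/* For example, a C loop such as

     #pragma GCC ivdep
     for (int i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   reaches this pass with an IFN_ANNOTATE call of kind
   annot_expr_ivdep_kind just before the loop condition; the switch
   above records it as loop->safelen = INT_MAX, and the call is then
   replaced by a plain copy of its first argument.  */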
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  for (auto loop : loops_list (cfun, 0))
    {
      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;

      /* Check all exit source blocks for annotations.  */
      for (auto e : get_loop_exit_edges (loop))
	replace_loop_annotate_in_block (e->src, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	    case annot_expr_maybe_infinite_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();

  bb_to_omp_idx.release ();

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return execute_build_cfg ();
  }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
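
/* For example, with the GNU C labels-as-values extension

     void *targets[] = { &&lab1, &&lab2 };
     goto *targets[i];

   the GIMPLE_GOTO destination is a pointer-valued expression rather
   than a LABEL_DECL, so this predicate returns true for it.  */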
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
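
/* For example, the sequence built for

     default:
       __builtin_unreachable ();

   satisfies this predicate: a GIMPLE_LABEL followed by the
   BUILT_IN_UNREACHABLE call; labels, debug stmts and clobbers
   before the call are ignored.  */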
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  if (safe_is_a <gcond *> (*gsi_last_bb (pred_bb)))
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (!bb_to_omp_idx.is_empty ())
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx.is_empty ())
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gcall *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_call_set_ctrl_altering (g, true);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
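
/* To illustrate the factoring above: each original computed goto

     goto *EXPR;

   is rewritten as

     gotovar = EXPR;
     goto <factored label>;

   and the single dispatcher block performs "goto *gotovar;", so only
   the dispatcher needs an abnormal edge to each potential target
   label instead of every computed goto getting one.  */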
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, and 0 otherwise.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = *gsi_last_bb (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edge (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edge (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edge (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (!bb_to_omp_idx.is_empty ())
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx.is_empty ())
	bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with an ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (!bb_to_omp_idx.is_empty ())
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
				       true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
					 false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
				       false);
	    }
	}

      if (!bb_to_omp_idx.is_empty ())
	XDELETE (dispatcher_bbs);
    }

  omp_free_regions ();
}
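
/* Concretely, in a function using setjmp/longjmp, every block whose
   last statement can make an abnormal goto gets one abnormal edge to
   the shared ABNORMAL_DISPATCHER block, and the dispatcher gets one
   abnormal edge to each block starting with a non-local label or a
   returns-twice call, avoiding a quadratic number of direct edges
   between call sites and receivers.  */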
/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
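
/* For instance, when both arms of

     if (x) foo (); else bar ();

   fall on one source line, giving the statements of each arm distinct
   discriminators lets a sample-based profiler attribute execution
   counts to the individual branches rather than just to the line.  */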
1148 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1150 static bool
1151 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1153 expanded_location to;
1155 if (locus1 == locus2)
1156 return true;
1158 to = expand_location (locus2);
1160 if (from->line != to.line)
1161 return false;
1162 if (from->file == to.file)
1163 return true;
1164 return (from->file != NULL
1165 && to.file != NULL
1166 && filename_cmp (from->file, to.file) == 0);
1169 /* Assign a unique discriminator value to all statements in block bb that
1170 have the same line number as locus. */
1172 static void
1173 assign_discriminator (location_t locus, basic_block bb)
1175 gimple_stmt_iterator gsi;
1176 int discriminator;
1178 if (locus == UNKNOWN_LOCATION)
1179 return;
1181 expanded_location locus_e = expand_location (locus);
1183 discriminator = next_discriminator_for_locus (locus_e.line);
1185 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1187 gimple *stmt = gsi_stmt (gsi);
1188 location_t stmt_locus = gimple_location (stmt);
1189 if (same_line_p (locus, &locus_e, stmt_locus))
1190 gimple_set_location (stmt,
1191 location_with_discriminator (stmt_locus, discriminator));
1195 /* Assign discriminators to statement locations. */
1197 static void
1198 assign_discriminators (void)
1200 basic_block bb;
1202 FOR_EACH_BB_FN (bb, cfun)
1204 edge e;
1205 edge_iterator ei;
1206 gimple_stmt_iterator gsi;
1207 location_t curr_locus = UNKNOWN_LOCATION;
1208 expanded_location curr_locus_e = {};
1209 int curr_discr = 0;
1211 /* Traverse the basic block, if two function calls within a basic block
1212 are mapped to the same line, assign a new discriminator because a call
1213 stmt could be a split point of a basic block. */
1214 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1216 gimple *stmt = gsi_stmt (gsi);
1218 /* Don't allow debug stmts to affect discriminators, but
1219 allow them to take discriminators when they're on the
1220 same line as the preceding nondebug stmt. */
1221 if (is_gimple_debug (stmt))
1223 if (curr_locus != UNKNOWN_LOCATION
1224 && same_line_p (curr_locus, &curr_locus_e,
1225 gimple_location (stmt)))
1227 location_t loc = gimple_location (stmt);
1228 location_t dloc = location_with_discriminator (loc,
1229 curr_discr);
1230 gimple_set_location (stmt, dloc);
1232 continue;
1234 if (curr_locus == UNKNOWN_LOCATION)
1236 curr_locus = gimple_location (stmt);
1237 curr_locus_e = expand_location (curr_locus);
1239 else if (!same_line_p (curr_locus, &curr_locus_e, gimple_location (stmt)))
1241 curr_locus = gimple_location (stmt);
1242 curr_locus_e = expand_location (curr_locus);
1243 curr_discr = 0;
1245 else if (curr_discr != 0)
1247 location_t loc = gimple_location (stmt);
1248 location_t dloc = location_with_discriminator (loc, curr_discr);
1249 gimple_set_location (stmt, dloc);
1251 /* Allocate a new discriminator for CALL stmt. */
1252 if (gimple_code (stmt) == GIMPLE_CALL)
1253 curr_discr = next_discriminator_for_locus (curr_locus);
1256 gimple *last = last_nondebug_stmt (bb);
1257 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1258 if (locus == UNKNOWN_LOCATION)
1259 continue;
1261 expanded_location locus_e = expand_location (locus);
1263 FOR_EACH_EDGE (e, ei, bb->succs)
1265 gimple *first = first_non_label_stmt (e->dest);
1266 gimple *last = last_nondebug_stmt (e->dest);
1268 gimple *stmt_on_same_line = NULL;
1269 if (first && same_line_p (locus, &locus_e,
1270 gimple_location (first)))
1271 stmt_on_same_line = first;
1272 else if (last && same_line_p (locus, &locus_e,
1273 gimple_location (last)))
1274 stmt_on_same_line = last;
1276 if (stmt_on_same_line)
1278 if (has_discriminator (gimple_location (stmt_on_same_line))
1279 && !has_discriminator (locus))
1280 assign_discriminator (locus, bb);
1281 else
1282 assign_discriminator (locus, e->dest);
1288 /* Create the edges for a GIMPLE_COND starting at block BB. */
1290 static void
1291 make_cond_expr_edges (basic_block bb)
1293 gcond *entry = as_a <gcond *> (*gsi_last_bb (bb));
1294 gimple *then_stmt, *else_stmt;
1295 basic_block then_bb, else_bb;
1296 tree then_label, else_label;
1297 edge e;
1299 gcc_assert (entry);
1301 /* Entry basic blocks for each component. */
1302 then_label = gimple_cond_true_label (entry);
1303 else_label = gimple_cond_false_label (entry);
1304 then_bb = label_to_block (cfun, then_label);
1305 else_bb = label_to_block (cfun, else_label);
1306 then_stmt = first_stmt (then_bb);
1307 else_stmt = first_stmt (else_bb);
1309 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1310 e->goto_locus = gimple_location (then_stmt);
1311 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1312 if (e)
1313 e->goto_locus = gimple_location (else_stmt);
1315 /* We do not need the labels anymore. */
1316 gimple_cond_set_true_label (entry, NULL_TREE);
1317 gimple_cond_set_false_label (entry, NULL_TREE);
1321 /* Called for each element in the hash table (P) as we delete the
1322 edge to cases hash table.
1324 Clear all the CASE_CHAINs to prevent problems with copying of
1325 SWITCH_EXPRs and structure sharing rules, then free the hash table
1326 element. */
1328 bool
1329 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1331 tree t, next;
1333 for (t = value; t; t = next)
1335 next = CASE_CHAIN (t);
1336 CASE_CHAIN (t) = NULL;
1339 return true;
1342 /* Start recording information mapping edges to case labels. */
1344 void
1345 start_recording_case_labels (void)
1347 gcc_assert (edge_to_cases == NULL);
1348 edge_to_cases = new hash_map<edge, tree>;
1349 touched_switch_bbs = BITMAP_ALLOC (NULL);
1352 /* Return nonzero if we are recording information for case labels. */
1354 static bool
1355 recording_case_labels_p (void)
1357 return (edge_to_cases != NULL);
1360 /* Stop recording information mapping edges to case labels and
1361 remove any information we have recorded. */
1362 void
1363 end_recording_case_labels (void)
1365 bitmap_iterator bi;
1366 unsigned i;
1367 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1368 delete edge_to_cases;
1369 edge_to_cases = NULL;
1370 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1372 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1373 if (bb)
1375 if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
1376 group_case_labels_stmt (stmt);
1379 BITMAP_FREE (touched_switch_bbs);
1382 /* If we are inside a {start,end}_recording_cases block, then return
1383 a chain of CASE_LABEL_EXPRs from T which reference E.
1385 Otherwise return NULL. */
1387 tree
1388 get_cases_for_edge (edge e, gswitch *t)
1390 tree *slot;
1391 size_t i, n;
1393 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1394 chains available. Return NULL so the caller can detect this case. */
1395 if (!recording_case_labels_p ())
1396 return NULL;
1398 slot = edge_to_cases->get (e);
1399 if (slot)
1400 return *slot;
1402 /* If we did not find E in the hash table, then this must be the first
1403 time we have been queried for information about E & T. Add all the
1404 elements from T to the hash table then perform the query again. */
1406 n = gimple_switch_num_labels (t);
1407 for (i = 0; i < n; i++)
1409 tree elt = gimple_switch_label (t, i);
1410 tree lab = CASE_LABEL (elt);
1411 basic_block label_bb = label_to_block (cfun, lab);
1412 edge this_edge = find_edge (e->src, label_bb);
1414 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1415 a new chain. */
1416 tree &s = edge_to_cases->get_or_insert (this_edge);
1417 CASE_CHAIN (elt) = s;
1418 s = elt;
1421 return *edge_to_cases->get (e);
1424 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1426 static void
1427 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1429 size_t i, n;
1431 n = gimple_switch_num_labels (entry);
1433 for (i = 0; i < n; ++i)
1435 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1436 make_edge (bb, label_bb, 0);
1441 /* Return the basic block holding label DEST. */
1443 basic_block
1444 label_to_block (struct function *ifun, tree dest)
1446 int uid = LABEL_DECL_UID (dest);
1448 /* We would die hard when faced by an undefined label. Emit a label to
1449 the very first basic block. This will hopefully make even the dataflow
1450 and undefined variable warnings quite right. */
1451 if (seen_error () && uid < 0)
1453 gimple_stmt_iterator gsi =
1454 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1455 gimple *stmt;
1457 stmt = gimple_build_label (dest);
1458 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1459 uid = LABEL_DECL_UID (dest);
1461 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1462 return NULL;
1463 return (*ifun->cfg->x_label_to_block_map)[uid];
1466 /* Create edges for a goto statement at block BB. Returns true
1467 if abnormal edges should be created. */
1469 static bool
1470 make_goto_expr_edges (basic_block bb)
1472 gimple_stmt_iterator last = gsi_last_bb (bb);
1473 gimple *goto_t = gsi_stmt (last);
1475 /* A simple GOTO creates normal edges. */
1476 if (simple_goto_p (goto_t))
1478 tree dest = gimple_goto_dest (goto_t);
1479 basic_block label_bb = label_to_block (cfun, dest);
1480 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1481 e->goto_locus = gimple_location (goto_t);
1482 gsi_remove (&last, true);
1483 return false;
1486 /* A computed GOTO creates abnormal edges. */
1487 return true;
1490 /* Create edges for an asm statement with labels at block BB. */
1492 static void
1493 make_gimple_asm_edges (basic_block bb)
1495 gasm *stmt = as_a <gasm *> (*gsi_last_bb (bb));
1496 int i, n = gimple_asm_nlabels (stmt);
1498 for (i = 0; i < n; ++i)
1500 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1501 basic_block label_bb = label_to_block (cfun, label);
1502 make_edge (bb, label_bb, 0);
1506 /*---------------------------------------------------------------------------
1507 Flowgraph analysis
1508 ---------------------------------------------------------------------------*/
1510 /* Cleanup useless labels in basic blocks. This is something we wish
1511 to do early because it allows us to group case labels before creating
1512 the edges for the CFG, and it speeds up block statement iterators in
1513 all passes later on.
1514 We rerun this pass after CFG is created, to get rid of the labels that
1515 are no longer referenced. After then we do not run it any more, since
1516 (almost) no new labels should be created. */
1518 /* A map from basic block index to the leading label of that block. */
1519 struct label_record
1521 /* The label. */
1522 tree label;
1524 /* True if the label is referenced from somewhere. */
1525 bool used;
1528 /* Given LABEL return the first label in the same basic block. */
1530 static tree
1531 main_block_label (tree label, label_record *label_for_bb)
1533 basic_block bb = label_to_block (cfun, label);
1534 tree main_label = label_for_bb[bb->index].label;
1536 /* label_to_block possibly inserted undefined label into the chain. */
1537 if (!main_label)
1539 label_for_bb[bb->index].label = label;
1540 main_label = label;
1543 label_for_bb[bb->index].used = true;
1544 return main_label;
1547 /* Clean up redundant labels within the exception tree. */
1549 static void
1550 cleanup_dead_labels_eh (label_record *label_for_bb)
1552 eh_landing_pad lp;
1553 eh_region r;
1554 tree lab;
1555 int i;
1557 if (cfun->eh == NULL)
1558 return;
1560 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1561 if (lp && lp->post_landing_pad)
1563 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1564 if (lab != lp->post_landing_pad)
1566 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1567 lp->post_landing_pad = lab;
1568 EH_LANDING_PAD_NR (lab) = lp->index;
1572 FOR_ALL_EH_REGION (r)
1573 switch (r->type)
1575 case ERT_CLEANUP:
1576 case ERT_MUST_NOT_THROW:
1577 break;
1579 case ERT_TRY:
1581 eh_catch c;
1582 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1584 lab = c->label;
1585 if (lab)
1586 c->label = main_block_label (lab, label_for_bb);
1589 break;
1591 case ERT_ALLOWED_EXCEPTIONS:
1592 lab = r->u.allowed.label;
1593 if (lab)
1594 r->u.allowed.label = main_block_label (lab, label_for_bb);
1595 break;
1600 /* Cleanup redundant labels. This is a three-step process:
1601 1) Find the leading label for each block.
1602 2) Redirect all references to labels to the leading labels.
1603 3) Cleanup all useless labels. */
1605 void
1606 cleanup_dead_labels (void)
1608 basic_block bb;
1609 label_record *label_for_bb = XCNEWVEC (struct label_record,
1610 last_basic_block_for_fn (cfun));
1612 /* Find a suitable label for each block. We use the first user-defined
1613 label if there is one, or otherwise just the first label we see. */
1614 FOR_EACH_BB_FN (bb, cfun)
1616 gimple_stmt_iterator i;
1618 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1620 tree label;
1621 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1623 if (!label_stmt)
1624 break;
1626 label = gimple_label_label (label_stmt);
1628 /* If we have not yet seen a label for the current block,
1629 remember this one and see if there are more labels. */
1630 if (!label_for_bb[bb->index].label)
1632 label_for_bb[bb->index].label = label;
1633 continue;
1636 /* If we did see a label for the current block already, but it
1637 is an artificially created label, replace it if the current
1638 label is a user defined label. */
1639 if (!DECL_ARTIFICIAL (label)
1640 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1642 label_for_bb[bb->index].label = label;
1643 break;
1648 /* Now redirect all jumps/branches to the selected label.
1649 First do so for each block ending in a control statement. */
1650 FOR_EACH_BB_FN (bb, cfun)
1652 gimple *stmt = *gsi_last_bb (bb);
1653 tree label, new_label;
1655 if (!stmt)
1656 continue;
1658 switch (gimple_code (stmt))
1660 case GIMPLE_COND:
1662 gcond *cond_stmt = as_a <gcond *> (stmt);
1663 label = gimple_cond_true_label (cond_stmt);
1664 if (label)
1666 new_label = main_block_label (label, label_for_bb);
1667 if (new_label != label)
1668 gimple_cond_set_true_label (cond_stmt, new_label);
1671 label = gimple_cond_false_label (cond_stmt);
1672 if (label)
1674 new_label = main_block_label (label, label_for_bb);
1675 if (new_label != label)
1676 gimple_cond_set_false_label (cond_stmt, new_label);
1679 break;
1681 case GIMPLE_SWITCH:
1683 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1684 size_t i, n = gimple_switch_num_labels (switch_stmt);
1686 /* Replace all destination labels. */
1687 for (i = 0; i < n; ++i)
1689 tree case_label = gimple_switch_label (switch_stmt, i);
1690 label = CASE_LABEL (case_label);
1691 new_label = main_block_label (label, label_for_bb);
1692 if (new_label != label)
1693 CASE_LABEL (case_label) = new_label;
1695 break;
1698 case GIMPLE_ASM:
1700 gasm *asm_stmt = as_a <gasm *> (stmt);
1701 int i, n = gimple_asm_nlabels (asm_stmt);
1703 for (i = 0; i < n; ++i)
1705 tree cons = gimple_asm_label_op (asm_stmt, i);
1706 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1707 TREE_VALUE (cons) = label;
1709 break;
1712 /* We have to handle gotos until they're removed, and we don't
1713 remove them until after we've created the CFG edges. */
1714 case GIMPLE_GOTO:
1715 if (!computed_goto_p (stmt))
1717 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1718 label = gimple_goto_dest (goto_stmt);
1719 new_label = main_block_label (label, label_for_bb);
1720 if (new_label != label)
1721 gimple_goto_set_dest (goto_stmt, new_label);
1723 break;
1725 case GIMPLE_TRANSACTION:
1727 gtransaction *txn = as_a <gtransaction *> (stmt);
1729 label = gimple_transaction_label_norm (txn);
1730 if (label)
1732 new_label = main_block_label (label, label_for_bb);
1733 if (new_label != label)
1734 gimple_transaction_set_label_norm (txn, new_label);
1737 label = gimple_transaction_label_uninst (txn);
1738 if (label)
1740 new_label = main_block_label (label, label_for_bb);
1741 if (new_label != label)
1742 gimple_transaction_set_label_uninst (txn, new_label);
1745 label = gimple_transaction_label_over (txn);
1746 if (label)
1748 new_label = main_block_label (label, label_for_bb);
1749 if (new_label != label)
1750 gimple_transaction_set_label_over (txn, new_label);
1753 break;
1755 default:
1756 break;
1760 /* Do the same for the exception region tree labels. */
1761 cleanup_dead_labels_eh (label_for_bb);
1763 /* Finally, purge dead labels. All user-defined labels and labels that
1764 can be the target of non-local gotos and labels which have their
1765 address taken are preserved. */
1766 FOR_EACH_BB_FN (bb, cfun)
1768 gimple_stmt_iterator i;
1769 tree label_for_this_bb = label_for_bb[bb->index].label;
1771 if (!label_for_this_bb)
1772 continue;
1774 /* If the main label of the block is unused, we may still remove it. */
1775 if (!label_for_bb[bb->index].used)
1776 label_for_this_bb = NULL;
1778 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1780 tree label;
1781 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1783 if (!label_stmt)
1784 break;
1786 label = gimple_label_label (label_stmt);
1788 if (label == label_for_this_bb
1789 || !DECL_ARTIFICIAL (label)
1790 || DECL_NONLOCAL (label)
1791 || FORCED_LABEL (label))
1792 gsi_next (&i);
1793 else
1795 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1796 gsi_remove (&i, true);
1801 free (label_for_bb);
1804 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1805 the ones jumping to the same label.
1806 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1808 bool
1809 group_case_labels_stmt (gswitch *stmt)
1811 int old_size = gimple_switch_num_labels (stmt);
1812 int i, next_index, new_size;
1813 basic_block default_bb = NULL;
1814 hash_set<tree> *removed_labels = NULL;
1816 default_bb = gimple_switch_default_bb (cfun, stmt);
1818 /* Look for possible opportunities to merge cases. */
1819 new_size = i = 1;
1820 while (i < old_size)
1822 tree base_case, base_high;
1823 basic_block base_bb;
1825 base_case = gimple_switch_label (stmt, i);
1827 gcc_assert (base_case);
1828 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1830 /* Discard cases that have the same destination as the default case or
1831 whose destination blocks have already been removed as unreachable. */
1832 if (base_bb == NULL
1833 || base_bb == default_bb
1834 || (removed_labels
1835 && removed_labels->contains (CASE_LABEL (base_case))))
1837 i++;
1838 continue;
1841 base_high = CASE_HIGH (base_case)
1842 ? CASE_HIGH (base_case)
1843 : CASE_LOW (base_case);
1844 next_index = i + 1;
1846 /* Try to merge case labels. Break out when we reach the end
1847 of the label vector or when we cannot merge the next case
1848 label with the current one. */
1849 while (next_index < old_size)
1851 tree merge_case = gimple_switch_label (stmt, next_index);
1852 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1853 wide_int bhp1 = wi::to_wide (base_high) + 1;
1855 /* Merge the cases if they jump to the same place,
1856 and their ranges are consecutive. */
1857 if (merge_bb == base_bb
1858 && (removed_labels == NULL
1859 || !removed_labels->contains (CASE_LABEL (merge_case)))
1860 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1862 base_high
1863 = (CASE_HIGH (merge_case)
1864 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1865 CASE_HIGH (base_case) = base_high;
1866 next_index++;
1868 else
1869 break;
1872 /* Discard cases that have an unreachable destination block. */
1873 if (EDGE_COUNT (base_bb->succs) == 0
1874 && gimple_seq_unreachable_p (bb_seq (base_bb))
1875 /* Don't optimize this if __builtin_unreachable () is the
1876 implicitly added one by the C++ FE too early, before
1877 -Wreturn-type can be diagnosed. We'll optimize it later
1878 during switchconv pass or any other cfg cleanup. */
1879 && (gimple_in_ssa_p (cfun)
1880 || (LOCATION_LOCUS (gimple_location (last_nondebug_stmt (base_bb)))
1881 != BUILTINS_LOCATION)))
1883 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1884 if (base_edge != NULL)
1886 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1887 !gsi_end_p (gsi); gsi_next (&gsi))
1888 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1890 if (FORCED_LABEL (gimple_label_label (stmt))
1891 || DECL_NONLOCAL (gimple_label_label (stmt)))
1893 /* Forced/non-local labels aren't going to be removed,
1894 but they will be moved to some neighbouring basic
1895 block. If some later case label refers to one of
1896 those labels, we should throw that case away rather
1897 than keeping it around and refering to some random
1898 other basic block without an edge to it. */
1899 if (removed_labels == NULL)
1900 removed_labels = new hash_set<tree>;
1901 removed_labels->add (gimple_label_label (stmt));
1904 else
1905 break;
1906 remove_edge_and_dominated_blocks (base_edge);
1908 i = next_index;
1909 continue;
1912 if (new_size < i)
1913 gimple_switch_set_label (stmt, new_size,
1914 gimple_switch_label (stmt, i));
1915 i = next_index;
1916 new_size++;
1919 gcc_assert (new_size <= old_size);
1921 if (new_size < old_size)
1922 gimple_switch_set_num_labels (stmt, new_size);
1924 delete removed_labels;
1925 return new_size < old_size;
1928 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1929 and scan the sorted vector of cases. Combine the ones jumping to the
1930 same label. */
1932 bool
1933 group_case_labels (void)
1935 basic_block bb;
1936 bool changed = false;
1938 FOR_EACH_BB_FN (bb, cfun)
1940 if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
1941 changed |= group_case_labels_stmt (stmt);
1944 return changed;
1947 /* Checks whether we can merge block B into block A. */
1949 static bool
1950 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1952 gimple *stmt;
1954 if (!single_succ_p (a))
1955 return false;
1957 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1958 return false;
1960 if (single_succ (a) != b)
1961 return false;
1963 if (!single_pred_p (b))
1964 return false;
1966 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1967 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1968 return false;
1970 /* If A ends by a statement causing exceptions or something similar, we
1971 cannot merge the blocks. */
1972 stmt = *gsi_last_bb (a);
1973 if (stmt && stmt_ends_bb_p (stmt))
1974 return false;
1976 /* Examine the labels at the beginning of B. */
1977 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1978 gsi_next (&gsi))
1980 tree lab;
1981 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1982 if (!label_stmt)
1983 break;
1984 lab = gimple_label_label (label_stmt);
1986 /* Do not remove user-forced labels or, at -O0, any user labels. */
1987 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1988 return false;
1991 /* Protect simple loop latches. We only want to avoid merging
1992 the latch with the loop header or with a block in another
1993 loop in this case. */
1994 if (current_loops
1995 && b->loop_father->latch == b
1996 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1997 && (b->loop_father->header == a
1998 || b->loop_father != a->loop_father))
1999 return false;
2001 /* It must be possible to eliminate all phi nodes in B. If ssa form
2002 is not up-to-date and a name-mapping is registered, we cannot eliminate
2003 any phis. Symbols marked for renaming are never a problem though. */
2004 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
2005 gsi_next (&gsi))
2007 gphi *phi = gsi.phi ();
2008 /* Technically only new names matter. */
2009 if (name_registered_for_update_p (PHI_RESULT (phi)))
2010 return false;
2013 /* When not optimizing, don't merge if we'd lose goto_locus. */
2014 if (!optimize
2015 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
2017 location_t goto_locus = single_succ_edge (a)->goto_locus;
2018 gimple_stmt_iterator prev, next;
2019 prev = gsi_last_nondebug_bb (a);
2020 next = gsi_after_labels (b);
2021 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
2022 gsi_next_nondebug (&next);
2023 if ((gsi_end_p (prev)
2024 || gimple_location (gsi_stmt (prev)) != goto_locus)
2025 && (gsi_end_p (next)
2026 || gimple_location (gsi_stmt (next)) != goto_locus))
2027 return false;
2030 return true;
2033 /* Replaces all uses of NAME by VAL. */
2035 void
2036 replace_uses_by (tree name, tree val)
2038 imm_use_iterator imm_iter;
2039 use_operand_p use;
2040 gimple *stmt;
2041 edge e;
2043 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2045 /* Mark the block if we change the last stmt in it. */
2046 if (cfgcleanup_altered_bbs
2047 && stmt_ends_bb_p (stmt))
2048 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
2050 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
2052 replace_exp (use, val);
2054 if (gimple_code (stmt) == GIMPLE_PHI)
2056 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
2057 PHI_ARG_INDEX_FROM_USE (use));
2058 if (e->flags & EDGE_ABNORMAL
2059 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
2061 /* This can only occur for virtual operands, since
2062 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
2063 would prevent replacement. */
2064 gcc_checking_assert (virtual_operand_p (name));
2065 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2070 if (gimple_code (stmt) != GIMPLE_PHI)
2072 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2073 gimple *orig_stmt = stmt;
2074 size_t i;
2076 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2077 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2078 only change something from non-invariant to invariant, and only
2079 when propagating constants. */
2080 if (is_gimple_min_invariant (val))
2081 for (i = 0; i < gimple_num_ops (stmt); i++)
2083 tree op = gimple_op (stmt, i);
2084 /* Operands may be empty here. For example, the labels
2085 of a GIMPLE_COND are nulled out following the creation
2086 of the corresponding CFG edges. */
2087 if (op && TREE_CODE (op) == ADDR_EXPR)
2088 recompute_tree_invariant_for_addr_expr (op);
2091 if (fold_stmt (&gsi))
2092 stmt = gsi_stmt (gsi);
2094 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2095 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2097 update_stmt (stmt);
2101 gcc_checking_assert (has_zero_uses (name));
2103 /* Also update the trees stored in loop structures. */
2104 if (current_loops)
2106 for (auto loop : loops_list (cfun, 0))
2107 substitute_in_loop_info (loop, name, val);
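/* Illustrative use (a sketch, not from the sources): if cleanup
   discovers the degenerate PHI x_2 = PHI <5(bb3)>, calling
   replace_uses_by (x_2, build_int_cst (integer_type_node, 5))
   rewrites every use of x_2 to the constant, re-folds each touched
   statement, and purges EH edges of statements that can no longer
   throw afterwards.  */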
2111 /* Merge block B into block A. */
2113 static void
2114 gimple_merge_blocks (basic_block a, basic_block b)
2116 gimple_stmt_iterator last, gsi;
2117 gphi_iterator psi;
2119 if (dump_file)
2120 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2122 /* Remove all single-valued PHI nodes from block B of the form
2123 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2124 gsi = gsi_last_bb (a);
2125 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2127 gimple *phi = gsi_stmt (psi);
2128 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2129 gimple *copy;
2130 bool may_replace_uses = (virtual_operand_p (def)
2131 || may_propagate_copy (def, use));
2133 /* In case we maintain loop closed ssa form, do not propagate arguments
2134 of loop exit phi nodes. */
2135 if (current_loops
2136 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2137 && !virtual_operand_p (def)
2138 && TREE_CODE (use) == SSA_NAME
2139 && a->loop_father != b->loop_father)
2140 may_replace_uses = false;
2142 if (!may_replace_uses)
2144 gcc_assert (!virtual_operand_p (def));
2146 /* Note that just emitting the copies is fine -- there is no problem
2147 with ordering of phi nodes. This is because A is the single
2148 predecessor of B, therefore results of the phi nodes cannot
2149 appear as arguments of the phi nodes. */
2150 copy = gimple_build_assign (def, use);
2151 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2152 remove_phi_node (&psi, false);
2154 else
2156 /* If we deal with a PHI for virtual operands, we can simply
2157 propagate these without fussing with folding or updating
2158 the stmt. */
2159 if (virtual_operand_p (def))
2161 imm_use_iterator iter;
2162 use_operand_p use_p;
2163 gimple *stmt;
2165 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2166 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2167 SET_USE (use_p, use);
2169 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2170 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2172 else
2173 replace_uses_by (def, use);
2175 remove_phi_node (&psi, true);
2179 /* Ensure that B follows A. */
2180 move_block_after (b, a);
2182 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2183 gcc_assert (!*gsi_last_bb (a)
2184 || !stmt_ends_bb_p (*gsi_last_bb (a)));
2186 /* Remove labels from B and set gimple_bb to A for other statements. */
2187 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2189 gimple *stmt = gsi_stmt (gsi);
2190 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2192 tree label = gimple_label_label (label_stmt);
2193 int lp_nr;
2195 gsi_remove (&gsi, false);
2197 /* Now that we can thread computed gotos, we might have
2198 a situation where we have a forced label in block B.
2199 However, the label at the start of block B might still be
2200 used in other ways (think about the runtime checking for
2201 Fortran assigned gotos). So we cannot just delete the
2202 label. Instead we move the label to the start of block A. */
2203 if (FORCED_LABEL (label))
2205 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2206 tree first_label = NULL_TREE;
2207 if (!gsi_end_p (dest_gsi))
2208 if (glabel *first_label_stmt
2209 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2210 first_label = gimple_label_label (first_label_stmt);
2211 if (first_label
2212 && (DECL_NONLOCAL (first_label)
2213 || EH_LANDING_PAD_NR (first_label) != 0))
2214 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2215 else
2216 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2218 /* Other user labels are kept around in the form of a debug stmt. */
2219 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2221 gimple *dbg = gimple_build_debug_bind (label,
2222 integer_zero_node,
2223 stmt);
2224 gimple_debug_bind_reset_value (dbg);
2225 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2228 lp_nr = EH_LANDING_PAD_NR (label);
2229 if (lp_nr)
2231 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2232 lp->post_landing_pad = NULL;
2235 else
2237 gimple_set_bb (stmt, a);
2238 gsi_next (&gsi);
2242 /* When merging two BBs, if their counts are different, the larger count
2243 is selected as the new bb count. This is to handle inconsistent
2244 profiles. */
2245 if (a->loop_father == b->loop_father)
2247 a->count = a->count.merge (b->count);
2250 /* Merge the sequences. */
2251 last = gsi_last_bb (a);
2252 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2253 set_bb_seq (b, NULL);
2255 if (cfgcleanup_altered_bbs)
2256 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2260 /* Return the one of the two successors of BB that is not reachable
2261 by a complex edge, if there is one. Otherwise, return BB. We use
2262 this in optimizations that use post-dominators for their heuristics,
2263 to catch the cases in C++ where function calls are involved. */
2265 basic_block
2266 single_noncomplex_succ (basic_block bb)
2268 edge e0, e1;
2269 if (EDGE_COUNT (bb->succs) != 2)
2270 return bb;
2272 e0 = EDGE_SUCC (bb, 0);
2273 e1 = EDGE_SUCC (bb, 1);
2274 if (e0->flags & EDGE_COMPLEX)
2275 return e1->dest;
2276 if (e1->flags & EDGE_COMPLEX)
2277 return e0->dest;
2279 return bb;
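/* Illustrative example (a sketch): a block ending in a call that
   can throw internally has two successors, the ordinary fallthru
   edge and an EH edge; the EH edge is EDGE_COMPLEX, so the fallthru
   destination is returned.  For a two-way GIMPLE_COND block neither
   edge is complex and BB itself is returned.  */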
2282 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2284 void
2285 notice_special_calls (gcall *call)
2287 int flags = gimple_call_flags (call);
2289 if (flags & ECF_MAY_BE_ALLOCA)
2290 cfun->calls_alloca = true;
2291 if (flags & ECF_RETURNS_TWICE)
2292 cfun->calls_setjmp = true;
2293 if (gimple_call_must_tail_p (call))
2294 cfun->has_musttail = true;
2298 /* Clear flags set by notice_special_calls. Used by dead code removal
2299 to update the flags. */
2301 void
2302 clear_special_calls (void)
2304 cfun->calls_alloca = false;
2305 cfun->calls_setjmp = false;
2306 cfun->has_musttail = false;
2309 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2311 static void
2312 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2314 /* Since this block is no longer reachable, we can just delete all
2315 of its PHI nodes. */
2316 remove_phi_nodes (bb);
2318 /* Remove edges to BB's successors. */
2319 while (EDGE_COUNT (bb->succs) > 0)
2320 remove_edge (EDGE_SUCC (bb, 0));
2324 /* Remove statements of basic block BB. */
2326 static void
2327 remove_bb (basic_block bb)
2329 gimple_stmt_iterator i;
2331 if (dump_file)
2333 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2334 if (dump_flags & TDF_DETAILS)
2336 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2337 fprintf (dump_file, "\n");
2341 if (current_loops)
2343 class loop *loop = bb->loop_father;
2345 /* If a loop gets removed, clean up the information associated
2346 with it. */
2347 if (loop->latch == bb
2348 || loop->header == bb)
2349 free_numbers_of_iterations_estimates (loop);
2352 /* Remove all the instructions in the block. */
2353 if (bb_seq (bb) != NULL)
2355 /* Walk backwards so as to get a chance to substitute all
2356 released DEFs into debug stmts. See
2357 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2358 details. */
2359 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2361 gimple *stmt = gsi_stmt (i);
2362 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2363 if (label_stmt
2364 && (FORCED_LABEL (gimple_label_label (label_stmt))
2365 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2367 basic_block new_bb;
2368 gimple_stmt_iterator new_gsi;
2370 /* A non-reachable non-local label may still be referenced.
2371 But it no longer needs to carry the extra semantics of
2372 non-locality. */
2373 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2375 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2376 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2379 new_bb = bb->prev_bb;
2380 /* Don't move any labels into ENTRY block. */
2381 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2383 new_bb = single_succ (new_bb);
2384 gcc_assert (new_bb != bb);
2386 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2387 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2388 || (bb_to_omp_idx[bb->index]
2389 != bb_to_omp_idx[new_bb->index])))
2391 /* During cfg pass make sure to put orphaned labels
2392 into the right OMP region. */
2393 unsigned int i;
2394 int idx;
2395 new_bb = NULL;
2396 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2397 if (i >= NUM_FIXED_BLOCKS
2398 && idx == bb_to_omp_idx[bb->index]
2399 && i != (unsigned) bb->index)
2401 new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2402 break;
2404 if (new_bb == NULL)
2406 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2407 gcc_assert (new_bb != bb);
2410 new_gsi = gsi_after_labels (new_bb);
2411 gsi_remove (&i, false);
2412 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2414 else
2416 /* Release SSA definitions. */
2417 release_defs (stmt);
2418 gsi_remove (&i, true);
2421 if (gsi_end_p (i))
2422 i = gsi_last_bb (bb);
2423 else
2424 gsi_prev (&i);
2428 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2429 bb_to_omp_idx[bb->index] = -1;
2430 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2431 bb->il.gimple.seq = NULL;
2432 bb->il.gimple.phi_nodes = NULL;
2436 /* Given a basic block BB and a value VAL for use in the final statement
2437 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2438 the edge that will be taken out of the block.
2439 If VAL is NULL_TREE, then the current value of the final statement's
2440 predicate or index is used.
2441 If the value does not match a unique edge, NULL is returned. */
2443 edge
2444 find_taken_edge (basic_block bb, tree val)
2446 gimple *stmt;
2448 stmt = *gsi_last_bb (bb);
2450 /* Handle ENTRY and EXIT. */
2451 if (!stmt)
2454 else if (gimple_code (stmt) == GIMPLE_COND)
2455 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2457 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2458 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2460 else if (computed_goto_p (stmt))
2462 /* Only optimize if the argument is a label; if the argument is
2463 not a label then we cannot construct a proper CFG.
2465 It may be the case that we only need to allow the LABEL_REF to
2466 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2467 appear inside a LABEL_EXPR just to be safe. */
2468 if (val
2469 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2470 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2471 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2474 /* Otherwise we only know the taken successor edge if it's unique. */
2475 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
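/* Illustrative use (a sketch, not from the sources): for a block
   ending in "if (a_1 > 10)", find_taken_edge (bb, integer_zero_node)
   yields the false edge and find_taken_edge (bb, integer_one_node)
   the true edge; with VAL == NULL_TREE the result is NULL unless the
   condition has already folded to a constant.  */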
2478 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2479 statement, determine which of the outgoing edges will be taken out of the
2480 block. Return NULL if either edge may be taken. */
2482 static edge
2483 find_taken_edge_computed_goto (basic_block bb, tree val)
2485 basic_block dest;
2486 edge e = NULL;
2488 dest = label_to_block (cfun, val);
2489 if (dest)
2490 e = find_edge (bb, dest);
2492 /* It's possible for find_edge to return NULL here on invalid code
2493 that abuses the labels-as-values extension (e.g. code that attempts to
2494 jump *between* functions via stored labels-as-values; PR 84136).
2495 If so, then we simply return that NULL for the edge.
2496 We don't currently have a way of detecting such invalid code, so we
2497 can't assert that it was the case when a NULL edge occurs here. */
2499 return e;
2502 /* Given COND_STMT and a constant value VAL for use as the predicate,
2503 determine which of the two edges will be taken out of
2504 the statement's block. Return NULL if either edge may be taken.
2505 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2506 is used. */
2508 static edge
2509 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2511 edge true_edge, false_edge;
2513 if (val == NULL_TREE)
2515 /* Use the current value of the predicate. */
2516 if (gimple_cond_true_p (cond_stmt))
2517 val = integer_one_node;
2518 else if (gimple_cond_false_p (cond_stmt))
2519 val = integer_zero_node;
2520 else
2521 return NULL;
2523 else if (TREE_CODE (val) != INTEGER_CST)
2524 return NULL;
2526 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2527 &true_edge, &false_edge);
2529 return (integer_zerop (val) ? false_edge : true_edge);
2532 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2533 which edge will be taken out of the statement's block. Return NULL if any
2534 edge may be taken.
2535 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2536 is used. */
2538 edge
2539 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2541 basic_block dest_bb;
2542 edge e;
2543 tree taken_case;
2545 if (gimple_switch_num_labels (switch_stmt) == 1)
2546 taken_case = gimple_switch_default_label (switch_stmt);
2547 else
2549 if (val == NULL_TREE)
2550 val = gimple_switch_index (switch_stmt);
2551 if (TREE_CODE (val) != INTEGER_CST)
2552 return NULL;
2553 else
2554 taken_case = find_case_label_for_value (switch_stmt, val);
2556 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2558 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2559 gcc_assert (e);
2560 return e;
2564 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2565 We can make optimal use here of the fact that the case labels are
2566 sorted: We can do a binary search for a case matching VAL. */
2568 tree
2569 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2571 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2572 tree default_case = gimple_switch_default_label (switch_stmt);
2574 for (low = 0, high = n; high - low > 1; )
2576 size_t i = (high + low) / 2;
2577 tree t = gimple_switch_label (switch_stmt, i);
2578 int cmp;
2580 /* Cache the result of comparing CASE_LOW and val. */
2581 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2583 if (cmp > 0)
2584 high = i;
2585 else
2586 low = i;
2588 if (CASE_HIGH (t) == NULL)
2590 /* A single-valued case label. */
2591 if (cmp == 0)
2592 return t;
2594 else
2596 /* A case range. We can only handle integer ranges. */
2597 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2598 return t;
2602 return default_case;
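/* Illustrative walkthrough (a sketch, not from the sources): with
   the sorted labels { default, 1, 5 ... 9, 12 } and VAL == 7, the
   bisection narrows to the range label "5 ... 9", whose
   CASE_LOW <= 7 and CASE_HIGH >= 7, and returns it; for VAL == 3
   nothing matches and the default case is returned.  */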
2606 /* Dump a basic block on stderr. */
2608 void
2609 gimple_debug_bb (basic_block bb)
2611 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2615 /* Dump basic block with index N on stderr. */
2617 basic_block
2618 gimple_debug_bb_n (int n)
2620 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2621 return BASIC_BLOCK_FOR_FN (cfun, n);
2625 /* Dump the CFG on stderr.
2627 FLAGS are the same used by the tree dumping functions
2628 (see TDF_* in dumpfile.h). */
2630 void
2631 gimple_debug_cfg (dump_flags_t flags)
2633 gimple_dump_cfg (stderr, flags);
2637 /* Dump the program showing basic block boundaries on the given FILE.
2639 FLAGS are the same used by the tree dumping functions (see TDF_* in
2640 dumpfile.h). */
2642 void
2643 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2645 if (flags & TDF_DETAILS)
2647 dump_function_header (file, current_function_decl, flags);
2648 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2649 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2650 last_basic_block_for_fn (cfun));
2652 brief_dump_cfg (file, flags);
2653 fprintf (file, "\n");
2656 if (flags & TDF_STATS)
2657 dump_cfg_stats (file);
2659 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2663 /* Dump CFG statistics on FILE. */
2665 void
2666 dump_cfg_stats (FILE *file)
2668 static long max_num_merged_labels = 0;
2669 unsigned long size, total = 0;
2670 long num_edges;
2671 basic_block bb;
2672 const char * const fmt_str = "%-30s%-13s%12s\n";
2673 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2674 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2675 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2676 const char *funcname = current_function_name ();
2678 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2680 fprintf (file, "---------------------------------------------------------\n");
2681 fprintf (file, fmt_str, "", " Number of ", "Memory");
2682 fprintf (file, fmt_str, "", " instances ", "used ");
2683 fprintf (file, "---------------------------------------------------------\n");
2685 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2686 total += size;
2687 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2688 SIZE_AMOUNT (size));
2690 num_edges = 0;
2691 FOR_EACH_BB_FN (bb, cfun)
2692 num_edges += EDGE_COUNT (bb->succs);
2693 size = num_edges * sizeof (class edge_def);
2694 total += size;
2695 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2697 fprintf (file, "---------------------------------------------------------\n");
2698 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2699 SIZE_AMOUNT (total));
2700 fprintf (file, "---------------------------------------------------------\n");
2701 fprintf (file, "\n");
2703 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2704 max_num_merged_labels = cfg_stats.num_merged_labels;
2706 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2707 cfg_stats.num_merged_labels, max_num_merged_labels);
2709 fprintf (file, "\n");
2713 /* Dump CFG statistics on stderr. Keep extern so that it's always
2714 linked in the final executable. */
2716 DEBUG_FUNCTION void
2717 debug_cfg_stats (void)
2719 dump_cfg_stats (stderr);
2722 /*---------------------------------------------------------------------------
2723 Miscellaneous helpers
2724 ---------------------------------------------------------------------------*/
2726 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2727 flow. Transfers of control flow associated with EH are excluded. */
2729 static bool
2730 call_can_make_abnormal_goto (gimple *t)
2732 /* If the function has no non-local labels, then a call cannot make an
2733 abnormal transfer of control. */
2734 if (!cfun->has_nonlocal_label
2735 && !cfun->calls_setjmp)
2736 return false;
2738 /* Likewise if the call has no side effects. */
2739 if (!gimple_has_side_effects (t))
2740 return false;
2742 /* Likewise if the called function is leaf. */
2743 if (gimple_call_flags (t) & ECF_LEAF)
2744 return false;
2746 return true;
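/* Illustrative case (a sketch, not from the sources): in a function
   that calls setjmp or has non-local labels, a call with side
   effects to a non-leaf function may hand control back through
   longjmp or a nested function's non-local goto, so the CFG gives
   it an abnormal outgoing edge; const/pure and ECF_LEAF calls need
   no such edge.  */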
2750 /* Return true if T can make an abnormal transfer of control flow.
2751 Transfers of control flow associated with EH are excluded. */
2753 bool
2754 stmt_can_make_abnormal_goto (gimple *t)
2756 if (computed_goto_p (t))
2757 return true;
2758 if (is_gimple_call (t))
2759 return call_can_make_abnormal_goto (t);
2760 return false;
2764 /* Return true if T represents a stmt that always transfers control. */
2766 bool
2767 is_ctrl_stmt (gimple *t)
2769 switch (gimple_code (t))
2771 case GIMPLE_COND:
2772 case GIMPLE_SWITCH:
2773 case GIMPLE_GOTO:
2774 case GIMPLE_RETURN:
2775 case GIMPLE_RESX:
2776 return true;
2777 default:
2778 return false;
2783 /* Return true if T is a statement that may alter the flow of control
2784 (e.g., a call to a non-returning function). */
2786 bool
2787 is_ctrl_altering_stmt (gimple *t)
2789 gcc_assert (t);
2791 switch (gimple_code (t))
2793 case GIMPLE_CALL:
2794 /* Per stmt call flag indicates whether the call could alter
2795 control flow. */
2796 if (gimple_call_ctrl_altering_p (t))
2797 return true;
2798 break;
2800 case GIMPLE_EH_DISPATCH:
2801 /* EH_DISPATCH branches to the individual catch handlers at
2802 this level of a try or allowed-exceptions region. It can
2803 fallthru to the next statement as well. */
2804 return true;
2806 case GIMPLE_ASM:
2807 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2808 return true;
2809 break;
2811 CASE_GIMPLE_OMP:
2812 /* OpenMP directives alter control flow. */
2813 return true;
2815 case GIMPLE_TRANSACTION:
2816 /* A transaction start alters control flow. */
2817 return true;
2819 default:
2820 break;
2823 /* If a statement can throw, it alters control flow. */
2824 return stmt_can_throw_internal (cfun, t);
2828 /* Return true if T is a simple local goto. */
2830 bool
2831 simple_goto_p (gimple *t)
2833 return (gimple_code (t) == GIMPLE_GOTO
2834 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2838 /* Return true if STMT should start a new basic block. PREV_STMT is
2839 the statement preceding STMT. It is used when STMT is a label or a
2840 case label. Labels should only start a new basic block if their
2841 previous statement wasn't a label. Otherwise, sequence of labels
2842 would generate unnecessary basic blocks that only contain a single
2843 label. */
2845 static inline bool
2846 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2848 if (stmt == NULL)
2849 return false;
2851 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2852 any nondebug stmts in the block. We don't want to start another
2853 block in this case: the debug stmt will already have started the
2854 one STMT would start if we weren't outputting debug stmts. */
2855 if (prev_stmt && is_gimple_debug (prev_stmt))
2856 return false;
2858 /* Labels start a new basic block only if the preceding statement
2859 wasn't a label of the same type. This prevents the creation of
2860 consecutive blocks that have nothing but a single label. */
2861 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2863 /* Nonlocal and computed GOTO targets always start a new block. */
2864 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2865 || FORCED_LABEL (gimple_label_label (label_stmt)))
2866 return true;
2868 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2870 if (DECL_NONLOCAL (gimple_label_label (plabel))
2871 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2872 return true;
2874 cfg_stats.num_merged_labels++;
2875 return false;
2877 else
2878 return true;
2880 else if (gimple_code (stmt) == GIMPLE_CALL)
2882 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2883 /* setjmp acts similar to a nonlocal GOTO target and thus should
2884 start a new block. */
2885 return true;
2886 if (gimple_call_internal_p (stmt, IFN_PHI)
2887 && prev_stmt
2888 && gimple_code (prev_stmt) != GIMPLE_LABEL
2889 && (gimple_code (prev_stmt) != GIMPLE_CALL
2890 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2891 /* PHI nodes start a new block unless preceded by a label
2892 or another PHI. */
2893 return true;
2896 return false;
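/* Illustrative example (a sketch, not from the sources): an
   ordinary label whose immediately preceding statement is an
   artificial (compiler-generated) label is coalesced into the same
   block and counted in cfg_stats.num_merged_labels, whereas a
   non-local label, a FORCED_LABEL (computed-goto target) or a
   returns-twice call such as setjmp always opens a fresh block.  */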
2900 /* Return true if T should end a basic block. */
2902 bool
2903 stmt_ends_bb_p (gimple *t)
2905 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2908 /* Remove block annotations and other data structures. */
2910 void
2911 delete_tree_cfg_annotations (struct function *fn)
2913 vec_free (label_to_block_map_for_fn (fn));
2916 /* Return the virtual phi in BB. */
2918 gphi *
2919 get_virtual_phi (basic_block bb)
2921 for (gphi_iterator gsi = gsi_start_phis (bb);
2922 !gsi_end_p (gsi);
2923 gsi_next (&gsi))
2925 gphi *phi = gsi.phi ();
2927 if (virtual_operand_p (PHI_RESULT (phi)))
2928 return phi;
2931 return NULL;
2934 /* Return the first statement in basic block BB. */
2936 gimple *
2937 first_stmt (basic_block bb)
2939 gimple_stmt_iterator i = gsi_start_bb (bb);
2940 gimple *stmt = NULL;
2942 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2944 gsi_next (&i);
2945 stmt = NULL;
2947 return stmt;
2950 /* Return the first non-label statement in basic block BB. */
2952 static gimple *
2953 first_non_label_stmt (basic_block bb)
2955 gimple_stmt_iterator i = gsi_start_bb (bb);
2956 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2957 gsi_next (&i);
2958 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2961 /* Return the last nondebug statement in basic block BB. */
2963 gimple *
2964 last_nondebug_stmt (basic_block bb)
2966 gimple_stmt_iterator i = gsi_last_bb (bb);
2967 gimple *stmt = NULL;
2969 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2971 gsi_prev (&i);
2972 stmt = NULL;
2974 return stmt;
2977 /* Return the last statement of an otherwise empty block. Return NULL
2978 if the block is totally empty, or if it contains more than one
2979 statement. */
2981 gimple *
2982 last_and_only_stmt (basic_block bb)
2984 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2985 gimple *last, *prev;
2987 if (gsi_end_p (i))
2988 return NULL;
2990 last = gsi_stmt (i);
2991 gsi_prev_nondebug (&i);
2992 if (gsi_end_p (i))
2993 return last;
2995 /* Empty statements should no longer appear in the instruction stream.
2996 Everything that might have appeared before should be deleted by
2997 remove_useless_stmts, and the optimizers should just gsi_remove
2998 instead of smashing with build_empty_stmt.
3000 Thus the only thing that should appear here in a block containing
3001 one executable statement is a label. */
3002 prev = gsi_stmt (i);
3003 if (gimple_code (prev) == GIMPLE_LABEL)
3004 return last;
3005 else
3006 return NULL;
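/* Illustrative example (a sketch): for a block containing just
   "L1: return;" the label is ignored and the GIMPLE_RETURN is
   returned; an empty block, or one with two executable statements,
   yields NULL.  Debug stmts are skipped on both sides.  */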
3009 /* Returns the basic block after which the new basic block created
3010 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3011 near its "logical" location. This is of most help to humans looking
3012 at debugging dumps. */
3014 basic_block
3015 split_edge_bb_loc (edge edge_in)
3017 basic_block dest = edge_in->dest;
3018 basic_block dest_prev = dest->prev_bb;
3020 if (dest_prev)
3022 edge e = find_edge (dest_prev, dest);
3023 if (e && !(e->flags & EDGE_COMPLEX))
3024 return edge_in->src;
3026 return dest_prev;
3029 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3030 Abort on abnormal edges. */
3032 static basic_block
3033 gimple_split_edge (edge edge_in)
3035 basic_block new_bb, after_bb, dest;
3036 edge new_edge, e;
3038 /* Abnormal edges cannot be split. */
3039 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3041 dest = edge_in->dest;
3043 after_bb = split_edge_bb_loc (edge_in);
3045 new_bb = create_empty_bb (after_bb);
3046 new_bb->count = edge_in->count ();
3048 /* We want to avoid re-allocating PHIs when we first
3049 add the fallthru edge from new_bb to dest but we also
3050 want to avoid changing PHI argument order when
3051 first redirecting edge_in away from dest. Adding the
3052 fallthru edge first keeps the argument order: the new
3053 argument is appended last and the redirection then swaps
3054 it back into place by means of an unordered remove.
3055 So hack around things by temporarily removing all PHIs
3056 from the destination during the edge redirection and then
3057 making sure the edges stay in order. */
3058 gimple_seq saved_phis = phi_nodes (dest);
3059 unsigned old_dest_idx = edge_in->dest_idx;
3060 set_phi_nodes (dest, NULL);
3061 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
3062 e = redirect_edge_and_branch (edge_in, new_bb);
3063 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
3064 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
3065 dest->il.gimple.phi_nodes = saved_phis;
3067 return new_bb;
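/* Illustrative use (a sketch, not from the sources): passes that
   need to insert code executed only when a particular edge E is
   taken (e.g. partial redundancy elimination) split the critical
   edge via this hook; the new block inherits E's execution count
   and falls through to the old destination with the PHI argument
   order preserved.  */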
3071 /* Verify properties of the address expression T whose base should be
3072 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3074 static bool
3075 verify_address (tree t, bool verify_addressable)
3077 bool old_constant;
3078 bool old_side_effects;
3079 bool new_constant;
3080 bool new_side_effects;
3082 old_constant = TREE_CONSTANT (t);
3083 old_side_effects = TREE_SIDE_EFFECTS (t);
3085 recompute_tree_invariant_for_addr_expr (t);
3086 new_side_effects = TREE_SIDE_EFFECTS (t);
3087 new_constant = TREE_CONSTANT (t);
3089 if (old_constant != new_constant)
3091 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3092 return true;
3094 if (old_side_effects != new_side_effects)
3096 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3097 return true;
3100 tree base = TREE_OPERAND (t, 0);
3101 while (handled_component_p (base))
3102 base = TREE_OPERAND (base, 0);
3104 if (!(VAR_P (base)
3105 || TREE_CODE (base) == PARM_DECL
3106 || TREE_CODE (base) == RESULT_DECL))
3107 return false;
3109 if (verify_addressable && !TREE_ADDRESSABLE (base))
3111 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3112 return true;
3115 return false;
3119 /* Verify if EXPR is a valid GIMPLE reference expression. If
3120 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3121 if there is an error, otherwise false. */
3123 static bool
3124 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3126 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3128 if (TREE_CODE (expr) == REALPART_EXPR
3129 || TREE_CODE (expr) == IMAGPART_EXPR
3130 || TREE_CODE (expr) == BIT_FIELD_REF
3131 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3133 tree op = TREE_OPERAND (expr, 0);
3134 if (TREE_CODE (expr) != VIEW_CONVERT_EXPR
3135 && !is_gimple_reg_type (TREE_TYPE (expr)))
3137 error ("non-scalar %qs", code_name);
3138 return true;
3141 if (TREE_CODE (expr) == BIT_FIELD_REF)
3143 tree t1 = TREE_OPERAND (expr, 1);
3144 tree t2 = TREE_OPERAND (expr, 2);
3145 poly_uint64 size, bitpos;
3146 if (!poly_int_tree_p (t1, &size)
3147 || !poly_int_tree_p (t2, &bitpos)
3148 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3149 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3151 error ("invalid position or size operand to %qs", code_name);
3152 return true;
3154 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3155 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3157 error ("integral result type precision does not match "
3158 "field size of %qs", code_name);
3159 return true;
3161 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3162 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3163 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3164 size))
3166 error ("mode size of non-integral result does not "
3167 "match field size of %qs",
3168 code_name);
3169 return true;
3171 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3172 && !type_has_mode_precision_p (TREE_TYPE (op)))
3174 error ("%qs of non-mode-precision operand", code_name);
3175 return true;
3177 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3178 && maybe_gt (size + bitpos,
3179 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3181 error ("position plus size exceeds size of referenced object in "
3182 "%qs", code_name);
3183 return true;
3187 if ((TREE_CODE (expr) == REALPART_EXPR
3188 || TREE_CODE (expr) == IMAGPART_EXPR)
3189 && !useless_type_conversion_p (TREE_TYPE (expr),
3190 TREE_TYPE (TREE_TYPE (op))))
3192 error ("type mismatch in %qs reference", code_name);
3193 debug_generic_stmt (TREE_TYPE (expr));
3194 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3195 return true;
3198 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3200 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3201 that their operand is not a register or an invariant when
3202 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3203 bug). Otherwise there is nothing to verify, gross mismatches at
3204 most invoke undefined behavior. */
3205 if (require_lvalue
3206 && (is_gimple_reg (op) || is_gimple_min_invariant (op)))
3208 error ("conversion of %qs on the left hand side of %qs",
3209 get_tree_code_name (TREE_CODE (op)), code_name);
3210 debug_generic_stmt (expr);
3211 return true;
3213 else if (is_gimple_reg (op)
3214 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3216 error ("conversion of register to a different size in %qs",
3217 code_name);
3218 debug_generic_stmt (expr);
3219 return true;
3223 expr = op;
3226 bool require_non_reg = false;
3227 while (handled_component_p (expr))
3229 require_non_reg = true;
3230 code_name = get_tree_code_name (TREE_CODE (expr));
3232 if (TREE_CODE (expr) == REALPART_EXPR
3233 || TREE_CODE (expr) == IMAGPART_EXPR
3234 || TREE_CODE (expr) == BIT_FIELD_REF)
3236 error ("non-top-level %qs", code_name);
3237 return true;
3240 tree op = TREE_OPERAND (expr, 0);
3242 if (TREE_CODE (expr) == ARRAY_REF
3243 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3245 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3246 || (TREE_OPERAND (expr, 2)
3247 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3248 || (TREE_OPERAND (expr, 3)
3249 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3251 error ("invalid operands to %qs", code_name);
3252 debug_generic_stmt (expr);
3253 return true;
3257 /* Verify if the reference array element types are compatible. */
3258 if (TREE_CODE (expr) == ARRAY_REF
3259 && !useless_type_conversion_p (TREE_TYPE (expr),
3260 TREE_TYPE (TREE_TYPE (op))))
3262 error ("type mismatch in %qs", code_name);
3263 debug_generic_stmt (TREE_TYPE (expr));
3264 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3265 return true;
3267 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3268 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3269 TREE_TYPE (TREE_TYPE (op))))
3271 error ("type mismatch in %qs", code_name);
3272 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3273 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3274 return true;
3277 if (TREE_CODE (expr) == COMPONENT_REF)
3279 if (TREE_OPERAND (expr, 2)
3280 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3282 error ("invalid %qs offset operator", code_name);
3283 return true;
3285 if (!useless_type_conversion_p (TREE_TYPE (expr),
3286 TREE_TYPE (TREE_OPERAND (expr, 1))))
3288 error ("type mismatch in %qs", code_name);
3289 debug_generic_stmt (TREE_TYPE (expr));
3290 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3291 return true;
3295 expr = op;
3298 code_name = get_tree_code_name (TREE_CODE (expr));
3300 if (TREE_CODE (expr) == MEM_REF)
3302 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3303 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3304 && verify_address (TREE_OPERAND (expr, 0), false)))
3306 error ("invalid address operand in %qs", code_name);
3307 debug_generic_stmt (expr);
3308 return true;
3310 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3311 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3313 error ("invalid offset operand in %qs", code_name);
3314 debug_generic_stmt (expr);
3315 return true;
3317 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3318 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3320 error ("invalid clique in %qs", code_name);
3321 debug_generic_stmt (expr);
3322 return true;
3325 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3327 if (!TMR_BASE (expr)
3328 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3329 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3330 && verify_address (TMR_BASE (expr), false)))
3332 error ("invalid address operand in %qs", code_name);
3333 return true;
3335 if (!TMR_OFFSET (expr)
3336 || !poly_int_tree_p (TMR_OFFSET (expr))
3337 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3339 error ("invalid offset operand in %qs", code_name);
3340 debug_generic_stmt (expr);
3341 return true;
3343 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3344 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3346 error ("invalid clique in %qs", code_name);
3347 debug_generic_stmt (expr);
3348 return true;
3351 else if (INDIRECT_REF_P (expr))
3353 error ("%qs in gimple IL", code_name);
3354 debug_generic_stmt (expr);
3355 return true;
3357 else if (require_non_reg
3358 && (is_gimple_reg (expr)
3359 || (is_gimple_min_invariant (expr)
3360 /* STRING_CSTs are representatives of the string table
3361 entry which lives in memory. */
3362 && TREE_CODE (expr) != STRING_CST)))
3364 error ("%qs as base where non-register is required", code_name);
3365 debug_generic_stmt (expr);
3366 return true;
3369 if (!require_lvalue
3370 && (is_gimple_reg (expr) || is_gimple_min_invariant (expr)))
3371 return false;
3373 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3374 return false;
3376 if (TREE_CODE (expr) != TARGET_MEM_REF
3377 && TREE_CODE (expr) != MEM_REF)
3379 error ("invalid expression for min lvalue");
3380 return true;
3383 return false;
3386 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3387 list of pointer-to types that is trivially convertible to DEST. */
3389 static bool
3390 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3392 tree src;
3394 if (!TYPE_POINTER_TO (src_obj))
3395 return true;
3397 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3398 if (useless_type_conversion_p (dest, src))
3399 return true;
3401 return false;
3404 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3405 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3407 static bool
3408 valid_fixed_convert_types_p (tree type1, tree type2)
3410 return (FIXED_POINT_TYPE_P (type1)
3411 && (INTEGRAL_TYPE_P (type2)
3412 || SCALAR_FLOAT_TYPE_P (type2)
3413 || FIXED_POINT_TYPE_P (type2)));
3416 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3417 is a problem, otherwise false. */
3419 static bool
3420 verify_gimple_call (gcall *stmt)
3422 tree fn = gimple_call_fn (stmt);
3423 tree fntype, fndecl;
3424 unsigned i;
3426 if (gimple_call_internal_p (stmt))
3428 if (fn)
3430 error ("gimple call has two targets");
3431 debug_generic_stmt (fn);
3432 return true;
3435 else
3437 if (!fn)
3439 error ("gimple call has no target");
3440 return true;
3444 if (fn && !is_gimple_call_addr (fn))
3446 error ("invalid function in gimple call");
3447 debug_generic_stmt (fn);
3448 return true;
3451 if (fn
3452 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3453 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3454 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3456 error ("non-function in gimple call");
3457 return true;
3460 fndecl = gimple_call_fndecl (stmt);
3461 if (fndecl
3462 && TREE_CODE (fndecl) == FUNCTION_DECL
3463 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3464 && !DECL_PURE_P (fndecl)
3465 && !TREE_READONLY (fndecl))
3467 error ("invalid pure const state for function");
3468 return true;
3471 tree lhs = gimple_call_lhs (stmt);
3472 if (lhs
3473 && (!is_gimple_reg (lhs)
3474 && (!is_gimple_lvalue (lhs)
3475 || verify_types_in_gimple_reference
3476 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3477 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3479 error ("invalid LHS in gimple call");
3480 return true;
3483 if (gimple_call_ctrl_altering_p (stmt)
3484 && gimple_call_noreturn_p (stmt)
3485 && should_remove_lhs_p (lhs))
3487 error ("LHS in %<noreturn%> call");
3488 return true;
3491 fntype = gimple_call_fntype (stmt);
3492 if (fntype
3493 && lhs
3494 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3495 /* ??? At least C++ misses conversions at assignments from
3496 void * call results.
3497 For now simply allow arbitrary pointer type conversions. */
3498 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3499 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3501 error ("invalid conversion in gimple call");
3502 debug_generic_stmt (TREE_TYPE (lhs));
3503 debug_generic_stmt (TREE_TYPE (fntype));
3504 return true;
3507 if (gimple_call_chain (stmt)
3508 && !is_gimple_val (gimple_call_chain (stmt)))
3510 error ("invalid static chain in gimple call");
3511 debug_generic_stmt (gimple_call_chain (stmt));
3512 return true;
3515 /* If there is a static chain argument, the call should either be
3516 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3517 if (gimple_call_chain (stmt)
3518 && fndecl
3519 && !DECL_STATIC_CHAIN (fndecl))
3521 error ("static chain with function that doesn%'t use one");
3522 return true;
3525 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3527 switch (DECL_FUNCTION_CODE (fndecl))
3529 case BUILT_IN_UNREACHABLE:
3530 case BUILT_IN_UNREACHABLE_TRAP:
3531 case BUILT_IN_TRAP:
3532 if (gimple_call_num_args (stmt) > 0)
3534 /* Built-in unreachable with parameters might not be caught by
3535 undefined behavior sanitizer. Front ends do check that users do
3536 not call them that way, but we also produce calls to
3537 __builtin_unreachable internally, for example when IPA figures
3538 out a call cannot happen in a legal program. In such cases,
3539 we must make sure arguments are stripped off. */
3540 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3541 "with arguments");
3542 return true;
3544 break;
3545 default:
3546 break;
3550 /* For a call to .DEFERRED_INIT,
3551 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3552 we should guarantee that when the 1st argument is a constant, it
3553 is the same as the size of the LHS.
3555 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3557 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3558 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3560 if (TREE_CODE (lhs) == SSA_NAME)
3561 lhs = SSA_NAME_VAR (lhs);
3563 poly_uint64 size_from_arg0, size_from_lhs;
3564 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3565 &size_from_arg0);
3566 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3567 &size_from_lhs);
3568 if (is_constant_size_arg0 && is_constant_size_lhs)
3569 if (maybe_ne (size_from_arg0, size_from_lhs))
3571 error ("%<DEFERRED_INIT%> calls should have same "
3572 "constant size for the first argument and LHS");
3573 return true;
3577 /* ??? The C frontend passes unpromoted arguments in case it
3578 didn't see a function declaration before the call. So for now
3579 leave the call arguments mostly unverified. Once we gimplify
3580 unit-at-a-time we have a chance to fix this. */
3581 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3583 tree arg = gimple_call_arg (stmt, i);
3584 if ((is_gimple_reg_type (TREE_TYPE (arg))
3585 && !is_gimple_val (arg))
3586 || (!is_gimple_reg_type (TREE_TYPE (arg))
3587 && !is_gimple_lvalue (arg)))
3589 error ("invalid argument to gimple call");
3590 debug_generic_expr (arg);
3591 return true;
3593 if (!is_gimple_reg (arg))
3595 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3596 arg = TREE_OPERAND (arg, 0);
3597 if (verify_types_in_gimple_reference (arg, false))
3598 return true;
3602 return false;
3605 /* Verifies the gimple comparison with the result type TYPE and
3606 the operands OP0 and OP1; the comparison code is CODE. */
3608 static bool
3609 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3611 tree op0_type = TREE_TYPE (op0);
3612 tree op1_type = TREE_TYPE (op1);
3614 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3616 error ("invalid operands in gimple comparison");
3617 return true;
3620 /* For comparisons we do not have the operation's type as the
3621 effective type the comparison is carried out in. Instead
3622 we require that either the first operand is trivially
3623 convertible into the second, or the other way around. */
3624 if (!useless_type_conversion_p (op0_type, op1_type)
3625 && !useless_type_conversion_p (op1_type, op0_type))
3627 error ("mismatching comparison operand types");
3628 debug_generic_expr (op0_type);
3629 debug_generic_expr (op1_type);
3630 return true;
3633 /* The resulting type of a comparison may be an effective boolean type. */
3634 if (INTEGRAL_TYPE_P (type)
3635 && (TREE_CODE (type) == BOOLEAN_TYPE
3636 || TYPE_PRECISION (type) == 1))
3638 if ((VECTOR_TYPE_P (op0_type)
3639 || VECTOR_TYPE_P (op1_type))
3640 && code != EQ_EXPR && code != NE_EXPR
3641 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3642 && !VECTOR_INTEGER_TYPE_P (op0_type))
3644 error ("unsupported operation or type for vector comparison"
3645 " returning a boolean");
3646 debug_generic_expr (op0_type);
3647 debug_generic_expr (op1_type);
3648 return true;
3651 /* Or a boolean vector type with the same element count
3652 as the comparison operand types. */
3653 else if (VECTOR_TYPE_P (type)
3654 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3656 if (TREE_CODE (op0_type) != VECTOR_TYPE
3657 || TREE_CODE (op1_type) != VECTOR_TYPE)
3659 error ("non-vector operands in vector comparison");
3660 debug_generic_expr (op0_type);
3661 debug_generic_expr (op1_type);
3662 return true;
3665 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3666 TYPE_VECTOR_SUBPARTS (op0_type)))
3668 error ("invalid vector comparison resulting type");
3669 debug_generic_expr (type);
3670 return true;
3673 else
3675 error ("bogus comparison result type");
3676 debug_generic_expr (type);
3677 return true;
3680 return false;
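/* Illustrative checks (a sketch, not from the sources):
   "_1 = a_2 < b_3" with integer operands and a boolean (or 1-bit
   integral) result is accepted, as is a comparison of two V4SI
   vectors producing a 4-element vector boolean; an ordered
   comparison of float vectors that tries to return a single
   boolean is diagnosed as an unsupported vector comparison.  */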
3683 /* Verify a gimple assignment statement STMT with an unary rhs.
3684 Returns true if anything is wrong. */
3686 static bool
3687 verify_gimple_assign_unary (gassign *stmt)
3689 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3690 tree lhs = gimple_assign_lhs (stmt);
3691 tree lhs_type = TREE_TYPE (lhs);
3692 tree rhs1 = gimple_assign_rhs1 (stmt);
3693 tree rhs1_type = TREE_TYPE (rhs1);
3695 if (!is_gimple_reg (lhs))
3697 error ("non-register as LHS of unary operation");
3698 return true;
3701 if (!is_gimple_val (rhs1))
3703 error ("invalid operand in unary operation");
3704 return true;
3707 const char* const code_name = get_tree_code_name (rhs_code);
3709 /* First handle conversions. */
3710 switch (rhs_code)
3712 CASE_CONVERT:
3714 /* Allow conversions between vectors with the same number of elements,
3715 provided that the conversion is OK for the element types too. */
3716 if (VECTOR_TYPE_P (lhs_type)
3717 && VECTOR_TYPE_P (rhs1_type)
3718 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3719 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3721 lhs_type = TREE_TYPE (lhs_type);
3722 rhs1_type = TREE_TYPE (rhs1_type);
3724 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3726 error ("invalid vector types in nop conversion");
3727 debug_generic_expr (lhs_type);
3728 debug_generic_expr (rhs1_type);
3729 return true;
3732 /* Allow conversions from pointer type to integral type only if
3733 there is no sign or zero extension involved.
3734 For targets where the precision of ptrofftype doesn't match that
3735 of pointers we allow conversions to types where
3736 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3737 if ((POINTER_TYPE_P (lhs_type)
3738 && INTEGRAL_TYPE_P (rhs1_type))
3739 || (POINTER_TYPE_P (rhs1_type)
3740 && INTEGRAL_TYPE_P (lhs_type)
3741 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3742 #if defined(POINTERS_EXTEND_UNSIGNED)
3743 || (TYPE_MODE (rhs1_type) == ptr_mode
3744 && (TYPE_PRECISION (lhs_type)
3745 == BITS_PER_WORD /* word_mode */
3746 || (TYPE_PRECISION (lhs_type)
3747 == GET_MODE_PRECISION (Pmode))))
3748 #endif
3750 return false;
3752 /* Allow conversion from integral to offset type and vice versa. */
3753 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3754 && INTEGRAL_TYPE_P (rhs1_type))
3755 || (INTEGRAL_TYPE_P (lhs_type)
3756 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3757 return false;
3759 /* Otherwise assert we are converting between types of the
3760 same kind. */
3761 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3763 error ("invalid types in nop conversion");
3764 debug_generic_expr (lhs_type);
3765 debug_generic_expr (rhs1_type);
3766 return true;
3769 return false;
3772 case ADDR_SPACE_CONVERT_EXPR:
3774 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3775 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3776 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3778 error ("invalid types in address space conversion");
3779 debug_generic_expr (lhs_type);
3780 debug_generic_expr (rhs1_type);
3781 return true;
3784 return false;
3787 case FIXED_CONVERT_EXPR:
3789 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3790 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3792 error ("invalid types in fixed-point conversion");
3793 debug_generic_expr (lhs_type);
3794 debug_generic_expr (rhs1_type);
3795 return true;
3798 return false;
3801 case FLOAT_EXPR:
3803 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3804 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3805 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3807 error ("invalid types in conversion to floating-point");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 return true;
3813 return false;
3816 case FIX_TRUNC_EXPR:
3818 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3819 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3820 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3822 error ("invalid types in conversion to integer");
3823 debug_generic_expr (lhs_type);
3824 debug_generic_expr (rhs1_type);
3825 return true;
3828 return false;
3831 case VEC_UNPACK_HI_EXPR:
3832 case VEC_UNPACK_LO_EXPR:
3833 case VEC_UNPACK_FLOAT_HI_EXPR:
3834 case VEC_UNPACK_FLOAT_LO_EXPR:
3835 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3836 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3837 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3838 || TREE_CODE (lhs_type) != VECTOR_TYPE
3839 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3840 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3841 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3842 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3843 || ((rhs_code == VEC_UNPACK_HI_EXPR
3844 || rhs_code == VEC_UNPACK_LO_EXPR)
3845 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3846 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3847 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3848 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3849 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3850 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3851 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3852 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3853 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3854 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3855 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3856 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3857 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3858 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3859 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3860 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3862 error ("type mismatch in %qs expression", code_name);
3863 debug_generic_expr (lhs_type);
3864 debug_generic_expr (rhs1_type);
3865 return true;
3868 return false;
3870 case NEGATE_EXPR:
3871 case ABS_EXPR:
3872 case BIT_NOT_EXPR:
3873 case PAREN_EXPR:
3874 case CONJ_EXPR:
3875 /* Disallow pointer and offset types for many unary gimple operations. */
3876 if (POINTER_TYPE_P (lhs_type)
3877 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3879 error ("invalid types for %qs", code_name);
3880 debug_generic_expr (lhs_type);
3881 debug_generic_expr (rhs1_type);
3882 return true;
3884 break;
3886 case ABSU_EXPR:
3887 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3888 || !TYPE_UNSIGNED (lhs_type)
3889 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3890 || TYPE_UNSIGNED (rhs1_type)
3891 || element_precision (lhs_type) != element_precision (rhs1_type))
3893 error ("invalid types for %qs", code_name);
3894 debug_generic_expr (lhs_type);
3895 debug_generic_expr (rhs1_type);
3896 return true;
3898 return false;
3900 case VEC_DUPLICATE_EXPR:
3901 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3902 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3904 error ("%qs should be from a scalar to a like vector", code_name);
3905 debug_generic_expr (lhs_type);
3906 debug_generic_expr (rhs1_type);
3907 return true;
3909 return false;
3911 default:
3912 gcc_unreachable ();
3915 /* For the remaining codes assert there is no conversion involved. */
3916 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3918 error ("non-trivial conversion in unary operation");
3919 debug_generic_expr (lhs_type);
3920 debug_generic_expr (rhs1_type);
3921 return true;
3924 return false;
3927 /* Verify a gimple assignment statement STMT with a binary rhs.
3928 Returns true if anything is wrong. */
3930 static bool
3931 verify_gimple_assign_binary (gassign *stmt)
3933 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3934 tree lhs = gimple_assign_lhs (stmt);
3935 tree lhs_type = TREE_TYPE (lhs);
3936 tree rhs1 = gimple_assign_rhs1 (stmt);
3937 tree rhs1_type = TREE_TYPE (rhs1);
3938 tree rhs2 = gimple_assign_rhs2 (stmt);
3939 tree rhs2_type = TREE_TYPE (rhs2);
3941 if (!is_gimple_reg (lhs))
3943 error ("non-register as LHS of binary operation");
3944 return true;
3947 if (!is_gimple_val (rhs1)
3948 || !is_gimple_val (rhs2))
3950 error ("invalid operands in binary operation");
3951 return true;
3954 const char* const code_name = get_tree_code_name (rhs_code);
3956 /* First handle operations that involve different types. */
3957 switch (rhs_code)
3959 case COMPLEX_EXPR:
3961 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3962 || !(INTEGRAL_TYPE_P (rhs1_type)
3963 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3964 || !(INTEGRAL_TYPE_P (rhs2_type)
3965 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3967 error ("type mismatch in %qs", code_name);
3968 debug_generic_expr (lhs_type);
3969 debug_generic_expr (rhs1_type);
3970 debug_generic_expr (rhs2_type);
3971 return true;
3974 return false;
3977 case LSHIFT_EXPR:
3978 case RSHIFT_EXPR:
3979 case LROTATE_EXPR:
3980 case RROTATE_EXPR:
3982 /* Shifts and rotates are ok on integral types, fixed point
3983 types and integer vector types. */
3984 if ((!INTEGRAL_TYPE_P (rhs1_type)
3985 && !FIXED_POINT_TYPE_P (rhs1_type)
3986 && ! (VECTOR_TYPE_P (rhs1_type)
3987 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3988 || (!INTEGRAL_TYPE_P (rhs2_type)
3989 /* Vector shifts of vectors are also ok. */
3990 && ! (VECTOR_TYPE_P (rhs1_type)
3991 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3992 && VECTOR_TYPE_P (rhs2_type)
3993 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3994 || !useless_type_conversion_p (lhs_type, rhs1_type))
3996 error ("type mismatch in %qs", code_name);
3997 debug_generic_expr (lhs_type);
3998 debug_generic_expr (rhs1_type);
3999 debug_generic_expr (rhs2_type);
4000 return true;
4003 return false;
4006 case WIDEN_LSHIFT_EXPR:
4008 if (!INTEGRAL_TYPE_P (lhs_type)
4009 || !INTEGRAL_TYPE_P (rhs1_type)
4010 || TREE_CODE (rhs2) != INTEGER_CST
4011 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
4013 error ("type mismatch in %qs", code_name);
4014 debug_generic_expr (lhs_type);
4015 debug_generic_expr (rhs1_type);
4016 debug_generic_expr (rhs2_type);
4017 return true;
4020 return false;
4023 case VEC_WIDEN_LSHIFT_HI_EXPR:
4024 case VEC_WIDEN_LSHIFT_LO_EXPR:
4026 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4027 || TREE_CODE (lhs_type) != VECTOR_TYPE
4028 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4029 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4030 || TREE_CODE (rhs2) != INTEGER_CST
4031 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4032 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4034 error ("type mismatch in %qs", code_name);
4035 debug_generic_expr (lhs_type);
4036 debug_generic_expr (rhs1_type);
4037 debug_generic_expr (rhs2_type);
4038 return true;
4041 return false;
4044 case PLUS_EXPR:
4045 case MINUS_EXPR:
4047 tree lhs_etype = lhs_type;
4048 tree rhs1_etype = rhs1_type;
4049 tree rhs2_etype = rhs2_type;
4050 if (VECTOR_TYPE_P (lhs_type))
4052 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4053 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4055 error ("invalid non-vector operands to %qs", code_name);
4056 return true;
4058 lhs_etype = TREE_TYPE (lhs_type);
4059 rhs1_etype = TREE_TYPE (rhs1_type);
4060 rhs2_etype = TREE_TYPE (rhs2_type);
4062 if (POINTER_TYPE_P (lhs_etype)
4063 || POINTER_TYPE_P (rhs1_etype)
4064 || POINTER_TYPE_P (rhs2_etype))
4066 error ("invalid (pointer) operands %qs", code_name);
4067 return true;
4070 /* Continue with generic binary expression handling. */
4071 break;
4074 case POINTER_PLUS_EXPR:
4076 if (!POINTER_TYPE_P (rhs1_type)
4077 || !useless_type_conversion_p (lhs_type, rhs1_type)
4078 || !ptrofftype_p (rhs2_type))
4080 error ("type mismatch in %qs", code_name);
4081 debug_generic_stmt (lhs_type);
4082 debug_generic_stmt (rhs1_type);
4083 debug_generic_stmt (rhs2_type);
4084 return true;
4087 return false;
4090 case POINTER_DIFF_EXPR:
4092 if (!POINTER_TYPE_P (rhs1_type)
4093 || !POINTER_TYPE_P (rhs2_type)
4094 /* Because we special-case pointers to void, we allow the difference
4095 of arbitrary pointers with the same mode. */
4096 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4097 || !INTEGRAL_TYPE_P (lhs_type)
4098 || TYPE_UNSIGNED (lhs_type)
4099 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4101 error ("type mismatch in %qs", code_name);
4102 debug_generic_stmt (lhs_type);
4103 debug_generic_stmt (rhs1_type);
4104 debug_generic_stmt (rhs2_type);
4105 return true;
4108 return false;
4111 case TRUTH_ANDIF_EXPR:
4112 case TRUTH_ORIF_EXPR:
4113 case TRUTH_AND_EXPR:
4114 case TRUTH_OR_EXPR:
4115 case TRUTH_XOR_EXPR:
4117 gcc_unreachable ();
4119 case LT_EXPR:
4120 case LE_EXPR:
4121 case GT_EXPR:
4122 case GE_EXPR:
4123 case EQ_EXPR:
4124 case NE_EXPR:
4125 case UNORDERED_EXPR:
4126 case ORDERED_EXPR:
4127 case UNLT_EXPR:
4128 case UNLE_EXPR:
4129 case UNGT_EXPR:
4130 case UNGE_EXPR:
4131 case UNEQ_EXPR:
4132 case LTGT_EXPR:
4133 /* Comparisons are also binary, but the result type is not
4134 connected to the operand types. */
4135 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4137 case WIDEN_MULT_EXPR:
4138 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4139 return true;
4140 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4141 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4143 case WIDEN_SUM_EXPR:
4145 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4146 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4147 && ((!INTEGRAL_TYPE_P (rhs1_type)
4148 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4149 || (!INTEGRAL_TYPE_P (lhs_type)
4150 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4151 || !useless_type_conversion_p (lhs_type, rhs2_type)
4152 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4153 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4155 error ("type mismatch in %qs", code_name);
4156 debug_generic_expr (lhs_type);
4157 debug_generic_expr (rhs1_type);
4158 debug_generic_expr (rhs2_type);
4159 return true;
4161 return false;
4164 case VEC_WIDEN_MULT_HI_EXPR:
4165 case VEC_WIDEN_MULT_LO_EXPR:
4166 case VEC_WIDEN_MULT_EVEN_EXPR:
4167 case VEC_WIDEN_MULT_ODD_EXPR:
4169 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4170 || TREE_CODE (lhs_type) != VECTOR_TYPE
4171 || !types_compatible_p (rhs1_type, rhs2_type)
4172 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4173 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4175 error ("type mismatch in %qs", code_name);
4176 debug_generic_expr (lhs_type);
4177 debug_generic_expr (rhs1_type);
4178 debug_generic_expr (rhs2_type);
4179 return true;
4181 return false;
4184 case VEC_PACK_TRUNC_EXPR:
4185 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
4186 vector boolean types. */
4187 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4188 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4189 && types_compatible_p (rhs1_type, rhs2_type)
4190 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4191 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4192 return false;
4194 /* Fallthru. */
4195 case VEC_PACK_SAT_EXPR:
4196 case VEC_PACK_FIX_TRUNC_EXPR:
4198 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4199 || TREE_CODE (lhs_type) != VECTOR_TYPE
4200 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4201 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4202 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4203 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4204 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4205 || !types_compatible_p (rhs1_type, rhs2_type)
4206 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4207 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4208 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4209 TYPE_VECTOR_SUBPARTS (lhs_type)))
4211 error ("type mismatch in %qs", code_name);
4212 debug_generic_expr (lhs_type);
4213 debug_generic_expr (rhs1_type);
4214 debug_generic_expr (rhs2_type);
4215 return true;
4218 return false;
4221 case VEC_PACK_FLOAT_EXPR:
4222 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4223 || TREE_CODE (lhs_type) != VECTOR_TYPE
4224 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4225 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4226 || !types_compatible_p (rhs1_type, rhs2_type)
4227 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4228 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4229 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4230 TYPE_VECTOR_SUBPARTS (lhs_type)))
4232 error ("type mismatch in %qs", code_name);
4233 debug_generic_expr (lhs_type);
4234 debug_generic_expr (rhs1_type);
4235 debug_generic_expr (rhs2_type);
4236 return true;
4239 return false;
4241 case MULT_EXPR:
4242 case MULT_HIGHPART_EXPR:
4243 case TRUNC_DIV_EXPR:
4244 case CEIL_DIV_EXPR:
4245 case FLOOR_DIV_EXPR:
4246 case ROUND_DIV_EXPR:
4247 case TRUNC_MOD_EXPR:
4248 case CEIL_MOD_EXPR:
4249 case FLOOR_MOD_EXPR:
4250 case ROUND_MOD_EXPR:
4251 case RDIV_EXPR:
4252 case EXACT_DIV_EXPR:
4253 case BIT_IOR_EXPR:
4254 case BIT_XOR_EXPR:
4255 /* Disallow pointer and offset types for many of the binary gimple operations. */
4256 if (POINTER_TYPE_P (lhs_type)
4257 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4259 error ("invalid types for %qs", code_name);
4260 debug_generic_expr (lhs_type);
4261 debug_generic_expr (rhs1_type);
4262 debug_generic_expr (rhs2_type);
4263 return true;
4265 /* Continue with generic binary expression handling. */
4266 break;
4268 case MIN_EXPR:
4269 case MAX_EXPR:
4270 /* Continue with generic binary expression handling. */
4271 break;
4273 case BIT_AND_EXPR:
4274 if (POINTER_TYPE_P (lhs_type)
4275 && TREE_CODE (rhs2) == INTEGER_CST)
4276 break;
4277 /* Disallow pointer and offset types for many of the binary gimple operations. */
4278 if (POINTER_TYPE_P (lhs_type)
4279 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4281 error ("invalid types for %qs", code_name);
4282 debug_generic_expr (lhs_type);
4283 debug_generic_expr (rhs1_type);
4284 debug_generic_expr (rhs2_type);
4285 return true;
4287 /* Continue with generic binary expression handling. */
4288 break;
4290 case VEC_SERIES_EXPR:
4291 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4293 error ("type mismatch in %qs", code_name);
4294 debug_generic_expr (rhs1_type);
4295 debug_generic_expr (rhs2_type);
4296 return true;
4298 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4299 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4301 error ("vector type expected in %qs", code_name);
4302 debug_generic_expr (lhs_type);
4303 return true;
4305 return false;
4307 default:
4308 gcc_unreachable ();
4311 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4312 || !useless_type_conversion_p (lhs_type, rhs2_type))
4314 error ("type mismatch in binary expression");
4315 debug_generic_stmt (lhs_type);
4316 debug_generic_stmt (rhs1_type);
4317 debug_generic_stmt (rhs2_type);
4318 return true;
4321 return false;
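/* Hedged example of the pointer rules above: for

     int *g (int *p, long i) { return p + i; }

   the gimplifier emits something like

     _1 = (sizetype) i_2(D);
     _3 = _1 * 4;
     _4 = p_5(D) + _3;

   where the last statement is a POINTER_PLUS_EXPR: rhs1 is the
   pointer, rhs2 is of ptrofftype (sizetype here) and the lhs type
   trivially converts from rhs1's type.  Writing the same addition as
   a plain PLUS_EXPR on the pointer would hit the
   "invalid (pointer) operands" check in the PLUS_EXPR/MINUS_EXPR
   case instead.  */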
4324 /* Verify a gimple assignment statement STMT with a ternary rhs.
4325 Returns true if anything is wrong. */
4327 static bool
4328 verify_gimple_assign_ternary (gassign *stmt)
4330 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4331 tree lhs = gimple_assign_lhs (stmt);
4332 tree lhs_type = TREE_TYPE (lhs);
4333 tree rhs1 = gimple_assign_rhs1 (stmt);
4334 tree rhs1_type = TREE_TYPE (rhs1);
4335 tree rhs2 = gimple_assign_rhs2 (stmt);
4336 tree rhs2_type = TREE_TYPE (rhs2);
4337 tree rhs3 = gimple_assign_rhs3 (stmt);
4338 tree rhs3_type = TREE_TYPE (rhs3);
4340 if (!is_gimple_reg (lhs))
4342 error ("non-register as LHS of ternary operation");
4343 return true;
4346 if (!is_gimple_val (rhs1)
4347 || !is_gimple_val (rhs2)
4348 || !is_gimple_val (rhs3))
4350 error ("invalid operands in ternary operation");
4351 return true;
4354 const char* const code_name = get_tree_code_name (rhs_code);
4356 /* First handle operations that involve different types. */
4357 switch (rhs_code)
4359 case WIDEN_MULT_PLUS_EXPR:
4360 case WIDEN_MULT_MINUS_EXPR:
4361 if ((!INTEGRAL_TYPE_P (rhs1_type)
4362 && !FIXED_POINT_TYPE_P (rhs1_type))
4363 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4364 || !useless_type_conversion_p (lhs_type, rhs3_type)
4365 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4366 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4368 error ("type mismatch in %qs", code_name);
4369 debug_generic_expr (lhs_type);
4370 debug_generic_expr (rhs1_type);
4371 debug_generic_expr (rhs2_type);
4372 debug_generic_expr (rhs3_type);
4373 return true;
4375 break;
4377 case VEC_COND_EXPR:
4378 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4379 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4380 TYPE_VECTOR_SUBPARTS (lhs_type)))
4382 error ("the first argument of a %qs must be of a "
4383 "boolean vector type of the same number of elements "
4384 "as the result", code_name);
4385 debug_generic_expr (lhs_type);
4386 debug_generic_expr (rhs1_type);
4387 return true;
4389 /* Fallthrough. */
4390 case COND_EXPR:
4391 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4392 || !useless_type_conversion_p (lhs_type, rhs3_type))
4394 error ("type mismatch in %qs", code_name);
4395 debug_generic_expr (lhs_type);
4396 debug_generic_expr (rhs2_type);
4397 debug_generic_expr (rhs3_type);
4398 return true;
4400 break;
4402 case VEC_PERM_EXPR:
4403 /* If the permutation is constant, then we allow lhs and rhs
4404 to have different vector types, provided:
4405 (1) lhs, rhs1, rhs2 have same element type.
4406 (2) rhs3 vector is constant and has integer element type.
4407 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4409 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4410 || TREE_CODE (rhs1_type) != VECTOR_TYPE
4411 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4412 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4414 error ("vector types expected in %qs", code_name);
4415 debug_generic_expr (lhs_type);
4416 debug_generic_expr (rhs1_type);
4417 debug_generic_expr (rhs2_type);
4418 debug_generic_expr (rhs3_type);
4419 return true;
4422 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4423 as long as lhs, rhs1 and rhs2 have the same element type. */
4424 if (TREE_CONSTANT (rhs3)
4425 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs1_type))
4426 || !useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs2_type)))
4427 : (!useless_type_conversion_p (lhs_type, rhs1_type)
4428 || !useless_type_conversion_p (lhs_type, rhs2_type)))
4430 error ("type mismatch in %qs", code_name);
4431 debug_generic_expr (lhs_type);
4432 debug_generic_expr (rhs1_type);
4433 debug_generic_expr (rhs2_type);
4434 debug_generic_expr (rhs3_type);
4435 return true;
4438 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4439 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4440 TYPE_VECTOR_SUBPARTS (rhs2_type))
4441 || (!TREE_CONSTANT(rhs3)
4442 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4443 TYPE_VECTOR_SUBPARTS (rhs3_type)))
4444 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4445 TYPE_VECTOR_SUBPARTS (lhs_type)))
4447 error ("vectors with different element number found in %qs",
4448 code_name);
4449 debug_generic_expr (lhs_type);
4450 debug_generic_expr (rhs1_type);
4451 debug_generic_expr (rhs2_type);
4452 debug_generic_expr (rhs3_type);
4453 return true;
4456 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4457 || (TREE_CODE (rhs3) != VECTOR_CST
4458 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4459 (TREE_TYPE (rhs3_type)))
4460 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4461 (TREE_TYPE (rhs1_type))))))
4463 error ("invalid mask type in %qs", code_name);
4464 debug_generic_expr (lhs_type);
4465 debug_generic_expr (rhs1_type);
4466 debug_generic_expr (rhs2_type);
4467 debug_generic_expr (rhs3_type);
4468 return true;
4471 return false;
4473 case SAD_EXPR:
4474 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4475 || !useless_type_conversion_p (lhs_type, rhs3_type)
4476 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4477 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4479 error ("type mismatch in %qs", code_name);
4480 debug_generic_expr (lhs_type);
4481 debug_generic_expr (rhs1_type);
4482 debug_generic_expr (rhs2_type);
4483 debug_generic_expr (rhs3_type);
4484 return true;
4487 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4488 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4489 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4491 error ("vector types expected in %qs", code_name);
4492 debug_generic_expr (lhs_type);
4493 debug_generic_expr (rhs1_type);
4494 debug_generic_expr (rhs2_type);
4495 debug_generic_expr (rhs3_type);
4496 return true;
4499 return false;
4501 case BIT_INSERT_EXPR:
4502 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4504 error ("type mismatch in %qs", code_name);
4505 debug_generic_expr (lhs_type);
4506 debug_generic_expr (rhs1_type);
4507 return true;
4509 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4510 && INTEGRAL_TYPE_P (rhs2_type))
4511 /* Vector element insert. */
4512 || (VECTOR_TYPE_P (rhs1_type)
4513 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4514 /* Aligned sub-vector insert. */
4515 || (VECTOR_TYPE_P (rhs1_type)
4516 && VECTOR_TYPE_P (rhs2_type)
4517 && types_compatible_p (TREE_TYPE (rhs1_type),
4518 TREE_TYPE (rhs2_type))
4519 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4520 TYPE_VECTOR_SUBPARTS (rhs2_type))
4521 && multiple_p (wi::to_poly_offset (rhs3),
4522 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4524 error ("not allowed type combination in %qs", code_name);
4525 debug_generic_expr (rhs1_type);
4526 debug_generic_expr (rhs2_type);
4527 return true;
4529 if (! tree_fits_uhwi_p (rhs3)
4530 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4531 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4533 error ("invalid position or size in %qs", code_name);
4534 return true;
4536 if (INTEGRAL_TYPE_P (rhs1_type)
4537 && !type_has_mode_precision_p (rhs1_type))
4539 error ("%qs into non-mode-precision operand", code_name);
4540 return true;
4542 if (INTEGRAL_TYPE_P (rhs1_type))
4544 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4545 if (bitpos >= TYPE_PRECISION (rhs1_type)
4546 || (bitpos + TYPE_PRECISION (rhs2_type)
4547 > TYPE_PRECISION (rhs1_type)))
4549 error ("insertion out of range in %qs", code_name);
4550 return true;
4553 else if (VECTOR_TYPE_P (rhs1_type))
4555 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4556 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4557 if (bitpos % bitsize != 0)
4559 error ("%qs not at element boundary", code_name);
4560 return true;
4563 return false;
4565 case DOT_PROD_EXPR:
4567 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4568 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4569 && ((!INTEGRAL_TYPE_P (rhs1_type)
4570 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4571 || (!INTEGRAL_TYPE_P (lhs_type)
4572 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4573 /* rhs1_type and rhs2_type may differ in sign. */
4574 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4575 || !useless_type_conversion_p (lhs_type, rhs3_type)
4576 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4577 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4579 error ("type mismatch in %qs", code_name);
4580 debug_generic_expr (lhs_type);
4581 debug_generic_expr (rhs1_type);
4582 debug_generic_expr (rhs2_type);
4583 return true;
4585 return false;
4588 case REALIGN_LOAD_EXPR:
4589 /* FIXME. */
4590 return false;
4592 default:
4593 gcc_unreachable ();
4595 return false;
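/* Hedged illustration of the BIT_INSERT_EXPR constraints above:
   with v_1 of type 'vector(4) int' and s_2 of type 'int',

     v_3 = BIT_INSERT_EXPR <v_1, s_2, 64>;

   replaces element 2: bit position 64 is a multiple of the 32-bit
   element size and the inserted 32 bits stay inside the 128-bit
   vector.  A position of 48 would trip the
   "'bit_insert_expr' not at element boundary" error instead.  */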
4598 /* Verify a gimple assignment statement STMT with a single rhs.
4599 Returns true if anything is wrong. */
4601 static bool
4602 verify_gimple_assign_single (gassign *stmt)
4604 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4605 tree lhs = gimple_assign_lhs (stmt);
4606 tree lhs_type = TREE_TYPE (lhs);
4607 tree rhs1 = gimple_assign_rhs1 (stmt);
4608 tree rhs1_type = TREE_TYPE (rhs1);
4609 bool res = false;
4611 const char* const code_name = get_tree_code_name (rhs_code);
4613 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4615 error ("non-trivial conversion in %qs", code_name);
4616 debug_generic_expr (lhs_type);
4617 debug_generic_expr (rhs1_type);
4618 return true;
4621 if (gimple_clobber_p (stmt)
4622 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4624 error ("%qs LHS in clobber statement",
4625 get_tree_code_name (TREE_CODE (lhs)));
4626 debug_generic_expr (lhs);
4627 return true;
4630 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4632 error ("%qs LHS in assignment statement",
4633 get_tree_code_name (TREE_CODE (lhs)));
4634 debug_generic_expr (lhs);
4635 return true;
4638 if (handled_component_p (lhs)
4639 || TREE_CODE (lhs) == MEM_REF
4640 || TREE_CODE (lhs) == TARGET_MEM_REF)
4641 res |= verify_types_in_gimple_reference (lhs, true);
4643 /* Special codes we cannot handle via their class. */
4644 switch (rhs_code)
4646 case ADDR_EXPR:
4648 tree op = TREE_OPERAND (rhs1, 0);
4649 if (!is_gimple_addressable (op))
4651 error ("invalid operand in %qs", code_name);
4652 return true;
4655 /* Technically there is no longer a need for matching types, but
4656 gimple hygiene asks for this check. In LTO we can end up
4657 combining incompatible units and thus end up with addresses
4658 of globals that change their type to a common one. */
4659 if (!in_lto_p
4660 && !types_compatible_p (TREE_TYPE (op),
4661 TREE_TYPE (TREE_TYPE (rhs1)))
4662 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4663 TREE_TYPE (op)))
4665 error ("type mismatch in %qs", code_name);
4666 debug_generic_stmt (TREE_TYPE (rhs1));
4667 debug_generic_stmt (TREE_TYPE (op));
4668 return true;
4671 return (verify_address (rhs1, true)
4672 || verify_types_in_gimple_reference (op, true));
4675 /* tcc_reference */
4676 case INDIRECT_REF:
4677 error ("%qs in gimple IL", code_name);
4678 return true;
4680 case WITH_SIZE_EXPR:
4681 if (!is_gimple_val (TREE_OPERAND (rhs1, 1)))
4683 error ("invalid %qs size argument in load", code_name);
4684 debug_generic_stmt (lhs);
4685 debug_generic_stmt (rhs1);
4686 return true;
4688 rhs1 = TREE_OPERAND (rhs1, 0);
4689 /* Fallthru. */
4690 case COMPONENT_REF:
4691 case BIT_FIELD_REF:
4692 case ARRAY_REF:
4693 case ARRAY_RANGE_REF:
4694 case VIEW_CONVERT_EXPR:
4695 case REALPART_EXPR:
4696 case IMAGPART_EXPR:
4697 case TARGET_MEM_REF:
4698 case MEM_REF:
4699 if (!is_gimple_reg (lhs)
4700 && is_gimple_reg_type (TREE_TYPE (lhs)))
4702 error ("invalid RHS for gimple memory store: %qs", code_name);
4703 debug_generic_stmt (lhs);
4704 debug_generic_stmt (rhs1);
4705 return true;
4707 return res || verify_types_in_gimple_reference (rhs1, false);
4709 /* tcc_constant */
4710 case SSA_NAME:
4711 case INTEGER_CST:
4712 case REAL_CST:
4713 case FIXED_CST:
4714 case COMPLEX_CST:
4715 case VECTOR_CST:
4716 case STRING_CST:
4717 return res;
4719 /* tcc_declaration */
4720 case CONST_DECL:
4721 return res;
4722 case VAR_DECL:
4723 case PARM_DECL:
4724 if (!is_gimple_reg (lhs)
4725 && !is_gimple_reg (rhs1)
4726 && is_gimple_reg_type (TREE_TYPE (lhs)))
4728 error ("invalid RHS for gimple memory store: %qs", code_name);
4729 debug_generic_stmt (lhs);
4730 debug_generic_stmt (rhs1);
4731 return true;
4733 return res;
4735 case CONSTRUCTOR:
4736 if (VECTOR_TYPE_P (rhs1_type))
4738 unsigned int i;
4739 tree elt_i, elt_v, elt_t = NULL_TREE;
4741 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4742 return res;
4743 /* For vector CONSTRUCTORs we require that either it is an empty
4744 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4745 (then the element count must be correct to cover the whole
4746 outer vector and the index must be NULL on all elements), or it
4747 is a CONSTRUCTOR of scalar elements, where we as an exception
4748 allow a smaller number of elements (assuming zero filling) and
4749 consecutive indexes as compared to NULL indexes (such
4750 CONSTRUCTORs can appear in the IL from FEs). */
4751 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4753 if (elt_t == NULL_TREE)
4755 elt_t = TREE_TYPE (elt_v);
4756 if (VECTOR_TYPE_P (elt_t))
4758 tree elt_t = TREE_TYPE (elt_v);
4759 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4760 TREE_TYPE (elt_t)))
4762 error ("incorrect type of vector %qs elements",
4763 code_name);
4764 debug_generic_stmt (rhs1);
4765 return true;
4767 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4768 * TYPE_VECTOR_SUBPARTS (elt_t),
4769 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4771 error ("incorrect number of vector %qs elements",
4772 code_name);
4773 debug_generic_stmt (rhs1);
4774 return true;
4777 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4778 elt_t))
4780 error ("incorrect type of vector %qs elements",
4781 code_name);
4782 debug_generic_stmt (rhs1);
4783 return true;
4785 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4786 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4788 error ("incorrect number of vector %qs elements",
4789 code_name);
4790 debug_generic_stmt (rhs1);
4791 return true;
4794 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4796 error ("incorrect type of vector CONSTRUCTOR elements");
4797 debug_generic_stmt (rhs1);
4798 return true;
4800 if (elt_i != NULL_TREE
4801 && (VECTOR_TYPE_P (elt_t)
4802 || TREE_CODE (elt_i) != INTEGER_CST
4803 || compare_tree_int (elt_i, i) != 0))
4805 error ("vector %qs with non-NULL element index",
4806 code_name);
4807 debug_generic_stmt (rhs1);
4808 return true;
4810 if (!is_gimple_val (elt_v))
4812 error ("vector %qs element is not a GIMPLE value",
4813 code_name);
4814 debug_generic_stmt (rhs1);
4815 return true;
4819 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4821 error ("non-vector %qs with elements", code_name);
4822 debug_generic_stmt (rhs1);
4823 return true;
4825 return res;
4827 case OBJ_TYPE_REF:
4828 /* FIXME. */
4829 return res;
4831 default:;
4834 return res;
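/* Hedged examples of the memory-store rules above, assuming 'int *'
   SSA names p_1 and q_2 and aggregates a and b:

     *p_1 = _3;    OK, the RHS is an SSA name (a gimple value)
     *p_1 = *q_2;  invalid, a memory-to-memory copy of a
                   register-typed value must go through a temporary
     a = b;        OK, an aggregate type is not a register type, so
                   block copies are allowed  */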
4837 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4838 is a problem, otherwise false. */
4840 static bool
4841 verify_gimple_assign (gassign *stmt)
4843 if (gimple_assign_nontemporal_move_p (stmt))
4845 tree lhs = gimple_assign_lhs (stmt);
4846 if (is_gimple_reg (lhs))
4848 error ("nontemporal store lhs cannot be a gimple register");
4849 debug_generic_stmt (lhs);
4850 return true;
4854 switch (gimple_assign_rhs_class (stmt))
4856 case GIMPLE_SINGLE_RHS:
4857 return verify_gimple_assign_single (stmt);
4859 case GIMPLE_UNARY_RHS:
4860 return verify_gimple_assign_unary (stmt);
4862 case GIMPLE_BINARY_RHS:
4863 return verify_gimple_assign_binary (stmt);
4865 case GIMPLE_TERNARY_RHS:
4866 return verify_gimple_assign_ternary (stmt);
4868 default:
4869 gcc_unreachable ();
4873 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4874 is a problem, otherwise false. */
4876 static bool
4877 verify_gimple_return (greturn *stmt)
4879 tree op = gimple_return_retval (stmt);
4880 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4882 /* We cannot test for present return values as we do not fix up missing
4883 return values from the original source. */
4884 if (op == NULL)
4885 return false;
4887 if (!is_gimple_val (op)
4888 && TREE_CODE (op) != RESULT_DECL)
4890 error ("invalid operand in return statement");
4891 debug_generic_stmt (op);
4892 return true;
4895 if ((TREE_CODE (op) == RESULT_DECL
4896 && DECL_BY_REFERENCE (op))
4897 || (TREE_CODE (op) == SSA_NAME
4898 && SSA_NAME_VAR (op)
4899 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4900 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4901 op = TREE_TYPE (op);
4903 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4905 error ("invalid conversion in return statement");
4906 debug_generic_stmt (restype);
4907 debug_generic_stmt (TREE_TYPE (op));
4908 return true;
4911 return false;
4915 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4916 is a problem, otherwise false. */
4918 static bool
4919 verify_gimple_goto (ggoto *stmt)
4921 tree dest = gimple_goto_dest (stmt);
4923 /* ??? We have two canonical forms of direct goto destinations, a
4924 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4925 if (TREE_CODE (dest) != LABEL_DECL
4926 && (!is_gimple_val (dest)
4927 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4929 error ("goto destination is neither a label nor a pointer");
4930 return true;
4933 return false;
4936 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4937 is a problem, otherwise false. */
4939 static bool
4940 verify_gimple_switch (gswitch *stmt)
4942 unsigned int i, n;
4943 tree elt, prev_upper_bound = NULL_TREE;
4944 tree index_type, elt_type = NULL_TREE;
4946 if (!is_gimple_val (gimple_switch_index (stmt)))
4948 error ("invalid operand to switch statement");
4949 debug_generic_stmt (gimple_switch_index (stmt));
4950 return true;
4953 index_type = TREE_TYPE (gimple_switch_index (stmt));
4954 if (! INTEGRAL_TYPE_P (index_type))
4956 error ("non-integral type switch statement");
4957 debug_generic_expr (index_type);
4958 return true;
4961 elt = gimple_switch_label (stmt, 0);
4962 if (CASE_LOW (elt) != NULL_TREE
4963 || CASE_HIGH (elt) != NULL_TREE
4964 || CASE_CHAIN (elt) != NULL_TREE)
4966 error ("invalid default case label in switch statement");
4967 debug_generic_expr (elt);
4968 return true;
4971 n = gimple_switch_num_labels (stmt);
4972 for (i = 1; i < n; i++)
4974 elt = gimple_switch_label (stmt, i);
4976 if (CASE_CHAIN (elt))
4978 error ("invalid %<CASE_CHAIN%>");
4979 debug_generic_expr (elt);
4980 return true;
4982 if (! CASE_LOW (elt))
4984 error ("invalid case label in switch statement");
4985 debug_generic_expr (elt);
4986 return true;
4988 if (CASE_HIGH (elt)
4989 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4991 error ("invalid case range in switch statement");
4992 debug_generic_expr (elt);
4993 return true;
4996 if (! elt_type)
4998 elt_type = TREE_TYPE (CASE_LOW (elt));
4999 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
5001 error ("type precision mismatch in switch statement");
5002 return true;
5005 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
5006 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
5008 error ("type mismatch for case label in switch statement");
5009 debug_generic_expr (elt);
5010 return true;
5013 if (prev_upper_bound)
5015 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
5017 error ("case labels not sorted in switch statement");
5018 return true;
5022 prev_upper_bound = CASE_HIGH (elt);
5023 if (! prev_upper_bound)
5024 prev_upper_bound = CASE_LOW (elt);
5027 return false;
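/* In hedged dump form, a switch this function accepts looks like

     switch (x_1) <default: <L3>, case 1: <L1>, case 5 ... 7: <L2>>

   label 0 is the default and carries no CASE_LOW, every other label
   has a CASE_LOW of the common label type, any range satisfies
   CASE_LOW < CASE_HIGH, and the labels are sorted so that each
   previous upper bound lies below the next CASE_LOW.  */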
5030 /* Verify a gimple debug statement STMT.
5031 Returns true if anything is wrong. */
5033 static bool
5034 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5036 /* There isn't much that could be wrong in a gimple debug stmt. A
5037 gimple debug bind stmt, for example, maps a tree (usually a
5038 VAR_DECL or a PARM_DECL, but possibly some scalarized component
5039 or member of an aggregate type) to another tree that can be an
5040 arbitrary expression. These stmts expand into debug
5041 insns, and are converted to debug notes by var-tracking.cc. */
5042 return false;
5045 /* Verify a gimple label statement STMT.
5046 Returns true if anything is wrong. */
5048 static bool
5049 verify_gimple_label (glabel *stmt)
5051 tree decl = gimple_label_label (stmt);
5052 int uid;
5053 bool err = false;
5055 if (TREE_CODE (decl) != LABEL_DECL)
5056 return true;
5057 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
5058 && DECL_CONTEXT (decl) != current_function_decl)
5060 error ("label context is not the current function declaration");
5061 err |= true;
5064 uid = LABEL_DECL_UID (decl);
5065 if (cfun->cfg
5066 && (uid == -1
5067 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
5069 error ("incorrect entry in %<label_to_block_map%>");
5070 err |= true;
5073 uid = EH_LANDING_PAD_NR (decl);
5074 if (uid)
5076 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
5077 if (decl != lp->post_landing_pad)
5079 error ("incorrect setting of landing pad number");
5080 err |= true;
5084 return err;
5087 /* Verify a gimple cond statement STMT.
5088 Returns true if anything is wrong. */
5090 static bool
5091 verify_gimple_cond (gcond *stmt)
5093 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5095 error ("invalid comparison code in gimple cond");
5096 return true;
5098 if (!(!gimple_cond_true_label (stmt)
5099 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5100 || !(!gimple_cond_false_label (stmt)
5101 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5103 error ("invalid labels in gimple cond");
5104 return true;
5107 return verify_gimple_comparison (boolean_type_node,
5108 gimple_cond_lhs (stmt),
5109 gimple_cond_rhs (stmt),
5110 gimple_cond_code (stmt));
5113 /* Verify the GIMPLE statement STMT. Returns true if there is an
5114 error, otherwise false. */
5116 static bool
5117 verify_gimple_stmt (gimple *stmt)
5119 switch (gimple_code (stmt))
5121 case GIMPLE_ASSIGN:
5122 return verify_gimple_assign (as_a <gassign *> (stmt));
5124 case GIMPLE_LABEL:
5125 return verify_gimple_label (as_a <glabel *> (stmt));
5127 case GIMPLE_CALL:
5128 return verify_gimple_call (as_a <gcall *> (stmt));
5130 case GIMPLE_COND:
5131 return verify_gimple_cond (as_a <gcond *> (stmt));
5133 case GIMPLE_GOTO:
5134 return verify_gimple_goto (as_a <ggoto *> (stmt));
5136 case GIMPLE_SWITCH:
5137 return verify_gimple_switch (as_a <gswitch *> (stmt));
5139 case GIMPLE_RETURN:
5140 return verify_gimple_return (as_a <greturn *> (stmt));
5142 case GIMPLE_ASM:
5143 return false;
5145 case GIMPLE_TRANSACTION:
5146 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5148 /* Tuples that do not have tree operands. */
5149 case GIMPLE_NOP:
5150 case GIMPLE_PREDICT:
5151 case GIMPLE_RESX:
5152 case GIMPLE_EH_DISPATCH:
5153 case GIMPLE_EH_MUST_NOT_THROW:
5154 return false;
5156 CASE_GIMPLE_OMP:
5157 /* OpenMP directives are validated by the FE and never operated
5158 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5159 non-gimple expressions when the main index variable has had
5160 its address taken. This does not affect the loop itself
5161 because the header of a GIMPLE_OMP_FOR is merely used to determine
5162 how to set up the parallel iteration. */
5163 return false;
5165 case GIMPLE_ASSUME:
5166 return false;
5168 case GIMPLE_DEBUG:
5169 return verify_gimple_debug (stmt);
5171 default:
5172 gcc_unreachable ();
5176 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5177 and false otherwise. */
5179 static bool
5180 verify_gimple_phi (gphi *phi)
5182 bool err = false;
5183 unsigned i;
5184 tree phi_result = gimple_phi_result (phi);
5185 bool virtual_p;
5187 if (!phi_result)
5189 error ("invalid %<PHI%> result");
5190 return true;
5193 virtual_p = virtual_operand_p (phi_result);
5194 if (TREE_CODE (phi_result) != SSA_NAME
5195 || (virtual_p
5196 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5198 error ("invalid %<PHI%> result");
5199 err = true;
5202 for (i = 0; i < gimple_phi_num_args (phi); i++)
5204 tree t = gimple_phi_arg_def (phi, i);
5206 if (!t)
5208 error ("missing %<PHI%> def");
5209 err |= true;
5210 continue;
5212 /* Addressable variables do have SSA_NAMEs but they
5213 are not considered gimple values. */
5214 else if ((TREE_CODE (t) == SSA_NAME
5215 && virtual_p != virtual_operand_p (t))
5216 || (virtual_p
5217 && (TREE_CODE (t) != SSA_NAME
5218 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5219 || (!virtual_p
5220 && !is_gimple_val (t)))
5222 error ("invalid %<PHI%> argument");
5223 debug_generic_expr (t);
5224 err |= true;
5226 #ifdef ENABLE_TYPES_CHECKING
5227 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5229 error ("incompatible types in %<PHI%> argument %u", i);
5230 debug_generic_stmt (TREE_TYPE (phi_result));
5231 debug_generic_stmt (TREE_TYPE (t));
5232 err |= true;
5234 #endif
5237 return err;
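/* Hedged PHI examples against the rules above:

     # x_3 = PHI <x_1(2), 5(3)>              OK: SSA result and
                                             gimple-value arguments
     # .MEM_4 = PHI <.MEM_1(2), .MEM_2(3)>   OK: virtual result with
                                             virtual arguments
     # x_3 = PHI <.MEM_1(2), 5(3)>           invalid: a virtual
                                             argument in a
                                             non-virtual PHI  */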
5240 /* Verify the GIMPLE statements inside the sequence STMTS. */
5242 static bool
5243 verify_gimple_in_seq_2 (gimple_seq stmts)
5245 gimple_stmt_iterator ittr;
5246 bool err = false;
5248 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5250 gimple *stmt = gsi_stmt (ittr);
5252 switch (gimple_code (stmt))
5254 case GIMPLE_BIND:
5255 err |= verify_gimple_in_seq_2 (
5256 gimple_bind_body (as_a <gbind *> (stmt)));
5257 break;
5259 case GIMPLE_TRY:
5260 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5261 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5262 break;
5264 case GIMPLE_EH_FILTER:
5265 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5266 break;
5268 case GIMPLE_EH_ELSE:
5270 geh_else *eh_else = as_a <geh_else *> (stmt);
5271 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5272 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5274 break;
5276 case GIMPLE_CATCH:
5277 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5278 as_a <gcatch *> (stmt)));
5279 break;
5281 case GIMPLE_ASSUME:
5282 err |= verify_gimple_in_seq_2 (gimple_assume_body (stmt));
5283 break;
5285 case GIMPLE_TRANSACTION:
5286 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5287 break;
5289 default:
5291 bool err2 = verify_gimple_stmt (stmt);
5292 if (err2)
5293 debug_gimple_stmt (stmt);
5294 err |= err2;
5299 return err;
5302 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5303 is a problem, otherwise false. */
5305 static bool
5306 verify_gimple_transaction (gtransaction *stmt)
5308 tree lab;
5310 lab = gimple_transaction_label_norm (stmt);
5311 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5312 return true;
5313 lab = gimple_transaction_label_uninst (stmt);
5314 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5315 return true;
5316 lab = gimple_transaction_label_over (stmt);
5317 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5318 return true;
5320 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5324 /* Verify the GIMPLE statements inside the statement list STMTS. */
5326 DEBUG_FUNCTION bool
5327 verify_gimple_in_seq (gimple_seq stmts, bool ice)
5329 timevar_push (TV_TREE_STMT_VERIFY);
5330 bool res = verify_gimple_in_seq_2 (stmts);
5331 if (res && ice)
5332 internal_error ("%<verify_gimple%> failed");
5333 timevar_pop (TV_TREE_STMT_VERIFY);
5334 return res;
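/* A minimal usage sketch, assuming a function whose body is still in
   sequence form (hypothetical call site, not from this file):  */
#if 0
  {
    gimple_seq body = gimple_body (current_function_decl);
    /* Recurses through binds, trys, catches and transactions as
       above and ICEs if anything is malformed.  */
    verify_gimple_in_seq (body, true);
  }
#endif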
5337 /* Return true when T can be shared. */
5339 static bool
5340 tree_node_can_be_shared (tree t)
5342 if (IS_TYPE_OR_DECL_P (t)
5343 || TREE_CODE (t) == SSA_NAME
5344 || TREE_CODE (t) == IDENTIFIER_NODE
5345 || TREE_CODE (t) == CASE_LABEL_EXPR
5346 || is_gimple_min_invariant (t))
5347 return true;
5349 if (t == error_mark_node)
5350 return true;
5352 return false;
5355 /* Called via walk_tree. Verify tree sharing. */
5357 static tree
5358 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5360 hash_set<void *> *visited = (hash_set<void *> *) data;
5362 if (tree_node_can_be_shared (*tp))
5364 *walk_subtrees = false;
5365 return NULL;
5368 if (visited->add (*tp))
5369 return *tp;
5371 return NULL;
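/* A minimal driver sketch for the callback above; the real driver is
   the operand walk in verify_gimple_in_cfg below.  EXPR is a
   hypothetical operand to check.  */
#if 0
  {
    tree expr = error_mark_node;	/* Placeholder operand.  */
    hash_set<void *> visited;
    tree addr = walk_tree (&expr, verify_node_sharing_1, &visited, NULL);
    if (addr)				/* First node seen twice.  */
      error ("incorrect sharing of tree nodes");
  }
#endif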
5374 /* Called via walk_gimple_stmt. Verify tree sharing. */
5376 static tree
5377 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5379 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5380 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5383 static bool eh_error_found;
5384 bool
5385 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5386 hash_set<gimple *> *visited)
5388 if (!visited->contains (stmt))
5390 error ("dead statement in EH table");
5391 debug_gimple_stmt (stmt);
5392 eh_error_found = true;
5394 return true;
5397 /* Verify that the block of location LOC is in BLOCKS. */
5399 static bool
5400 verify_location (hash_set<tree> *blocks, location_t loc)
5402 tree block = LOCATION_BLOCK (loc);
5403 if (block != NULL_TREE
5404 && !blocks->contains (block))
5406 error ("location references block not in block tree");
5407 return true;
5409 if (block != NULL_TREE)
5410 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5411 return false;
5414 /* Called via walk_tree. Verify that expressions have no blocks. */
5416 static tree
5417 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5419 if (!EXPR_P (*tp))
5421 *walk_subtrees = false;
5422 return NULL;
5425 location_t loc = EXPR_LOCATION (*tp);
5426 if (LOCATION_BLOCK (loc) != NULL)
5427 return *tp;
5429 return NULL;
5432 /* Called via walk_tree. Verify locations of expressions. */
5434 static tree
5435 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5437 hash_set<tree> *blocks = (hash_set<tree> *) data;
5438 tree t = *tp;
5440 /* ??? This doesn't really belong here but there's no good place to
5441 stick this remainder of old verify_expr. */
5442 /* ??? This barfs on debug stmts which contain binds to vars with
5443 different function context. */
5444 #if 0
5445 if (VAR_P (t)
5446 || TREE_CODE (t) == PARM_DECL
5447 || TREE_CODE (t) == RESULT_DECL)
5449 tree context = decl_function_context (t);
5450 if (context != cfun->decl
5451 && !SCOPE_FILE_SCOPE_P (context)
5452 && !TREE_STATIC (t)
5453 && !DECL_EXTERNAL (t))
5455 error ("local declaration from a different function");
5456 return t;
5459 #endif
5461 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5463 tree x = DECL_DEBUG_EXPR (t);
5464 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5465 if (addr)
5466 return addr;
5468 if ((VAR_P (t)
5469 || TREE_CODE (t) == PARM_DECL
5470 || TREE_CODE (t) == RESULT_DECL)
5471 && DECL_HAS_VALUE_EXPR_P (t))
5473 tree x = DECL_VALUE_EXPR (t);
5474 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5475 if (addr)
5476 return addr;
5479 if (!EXPR_P (t))
5481 *walk_subtrees = false;
5482 return NULL;
5485 location_t loc = EXPR_LOCATION (t);
5486 if (verify_location (blocks, loc))
5487 return t;
5489 return NULL;
5492 /* Called via walk_gimple_op. Verify locations of expressions. */
5494 static tree
5495 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5497 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5498 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5501 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5503 static void
5504 collect_subblocks (hash_set<tree> *blocks, tree block)
5506 tree t;
5507 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5509 blocks->add (t);
5510 collect_subblocks (blocks, t);
5514 /* Disable warnings about missing quoting in GCC diagnostics for
5515 the verification errors. Their format strings don't follow
5516 GCC diagnostic conventions and trigger an ICE in the end. */
5517 #if __GNUC__ >= 10
5518 # pragma GCC diagnostic push
5519 # pragma GCC diagnostic ignored "-Wformat-diag"
5520 #endif
5522 /* Verify the GIMPLE statements in the CFG of FN. */
5524 DEBUG_FUNCTION bool
5525 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow, bool ice)
5527 basic_block bb;
5528 bool err = false;
5530 timevar_push (TV_TREE_STMT_VERIFY);
5531 hash_set<void *> visited;
5532 hash_set<gimple *> visited_throwing_stmts;
5534 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5535 hash_set<tree> blocks;
5536 if (DECL_INITIAL (fn->decl))
5538 blocks.add (DECL_INITIAL (fn->decl));
5539 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5542 FOR_EACH_BB_FN (bb, fn)
5544 gimple_stmt_iterator gsi;
5545 edge_iterator ei;
5546 edge e;
5548 for (gphi_iterator gpi = gsi_start_phis (bb);
5549 !gsi_end_p (gpi);
5550 gsi_next (&gpi))
5552 gphi *phi = gpi.phi ();
5553 bool err2 = false;
5554 unsigned i;
5556 if (gimple_bb (phi) != bb)
5558 error ("gimple_bb (phi) is set to a wrong basic block");
5559 err2 = true;
5562 err2 |= verify_gimple_phi (phi);
5564 /* Only PHI arguments have locations. */
5565 if (gimple_location (phi) != UNKNOWN_LOCATION)
5567 error ("PHI node with location");
5568 err2 = true;
5571 for (i = 0; i < gimple_phi_num_args (phi); i++)
5573 tree arg = gimple_phi_arg_def (phi, i);
5574 tree addr = walk_tree (&arg, verify_node_sharing_1,
5575 &visited, NULL);
5576 if (addr)
5578 error ("incorrect sharing of tree nodes");
5579 debug_generic_expr (addr);
5580 err2 |= true;
5582 location_t loc = gimple_phi_arg_location (phi, i);
5583 if (virtual_operand_p (gimple_phi_result (phi))
5584 && loc != UNKNOWN_LOCATION)
5586 error ("virtual PHI with argument locations");
5587 err2 = true;
5589 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5590 if (addr)
5592 debug_generic_expr (addr);
5593 err2 = true;
5595 err2 |= verify_location (&blocks, loc);
5598 if (err2)
5599 debug_gimple_stmt (phi);
5600 err |= err2;
5603 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5605 gimple *stmt = gsi_stmt (gsi);
5606 bool err2 = false;
5607 struct walk_stmt_info wi;
5608 tree addr;
5609 int lp_nr;
5611 if (gimple_bb (stmt) != bb)
5613 error ("gimple_bb (stmt) is set to a wrong basic block");
5614 err2 = true;
5617 err2 |= verify_gimple_stmt (stmt);
5618 err2 |= verify_location (&blocks, gimple_location (stmt));
5620 memset (&wi, 0, sizeof (wi));
5621 wi.info = (void *) &visited;
5622 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5623 if (addr)
5625 error ("incorrect sharing of tree nodes");
5626 debug_generic_expr (addr);
5627 err2 |= true;
5630 memset (&wi, 0, sizeof (wi));
5631 wi.info = (void *) &blocks;
5632 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5633 if (addr)
5635 debug_generic_expr (addr);
5636 err2 |= true;
5639 /* If the statement is marked as part of an EH region, then it is
5640 expected that the statement could throw. Verify that when an
5641 optimization simplifies a statement so that we can prove it
5642 cannot throw, the other data structures are updated to
5643 match. */
5644 lp_nr = lookup_stmt_eh_lp (stmt);
5645 if (lp_nr != 0)
5646 visited_throwing_stmts.add (stmt);
5647 if (lp_nr > 0)
5649 if (!stmt_could_throw_p (cfun, stmt))
5651 if (verify_nothrow)
5653 error ("statement marked for throw, but doesn%'t");
5654 err2 |= true;
5657 else if (!gsi_one_before_end_p (gsi))
5659 error ("statement marked for throw in middle of block");
5660 err2 |= true;
5664 if (err2)
5665 debug_gimple_stmt (stmt);
5666 err |= err2;
5669 FOR_EACH_EDGE (e, ei, bb->succs)
5670 if (e->goto_locus != UNKNOWN_LOCATION)
5671 err |= verify_location (&blocks, e->goto_locus);
5674 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5675 eh_error_found = false;
5676 if (eh_table)
5677 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5678 (&visited_throwing_stmts);
5680 if (ice && (err || eh_error_found))
5681 internal_error ("verify_gimple failed");
5683 verify_histograms ();
5684 timevar_pop (TV_TREE_STMT_VERIFY);
5686 return (err || eh_error_found);
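/* Hedged usage note: with IL checking enabled the pass manager
   effectively does

     if (flag_checking)
       verify_gimple_in_cfg (cfun, true, true);

   after a pass, verifying every PHI and statement, tree sharing,
   location blocks and the EH table in a single walk.  */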
5690 /* Verifies that the flow information is OK. */
5692 static bool
5693 gimple_verify_flow_info (void)
5695 bool err = false;
5696 basic_block bb;
5697 gimple_stmt_iterator gsi;
5698 gimple *stmt;
5699 edge e;
5700 edge_iterator ei;
5702 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5703 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5705 error ("ENTRY_BLOCK has IL associated with it");
5706 err = true;
5709 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5710 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5712 error ("EXIT_BLOCK has IL associated with it");
5713 err = true;
5716 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5717 if (e->flags & EDGE_FALLTHRU)
5719 error ("fallthru to exit from bb %d", e->src->index);
5720 err = true;
5722 if (cfun->cfg->full_profile
5723 && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
5725 error ("entry block count not initialized");
5726 err = true;
5728 if (cfun->cfg->full_profile
5729 && !EXIT_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
5731 error ("exit block count not initialized");
5732 err = true;
5734 if (cfun->cfg->full_profile
5735 && !single_succ_edge
5736 (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability.initialized_p ())
5738 error ("probability of edge from entry block not initialized");
5739 err = true;
5743 FOR_EACH_BB_FN (bb, cfun)
5745 bool found_ctrl_stmt = false;
5747 stmt = NULL;
5749 if (cfun->cfg->full_profile)
5751 if (!bb->count.initialized_p ())
5753 error ("count of bb %d not initialized", bb->index);
5754 err = true;
5756 FOR_EACH_EDGE (e, ei, bb->succs)
5757 if (!e->probability.initialized_p ())
5759 error ("probability of edge %d->%d not initialized",
5760 bb->index, e->dest->index);
5761 err = true;
5766 /* Skip labels at the start of the basic block. */
5766 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5768 tree label;
5769 gimple *prev_stmt = stmt;
5771 stmt = gsi_stmt (gsi);
5773 if (gimple_code (stmt) != GIMPLE_LABEL)
5774 break;
5776 label = gimple_label_label (as_a <glabel *> (stmt));
5777 if (prev_stmt && DECL_NONLOCAL (label))
5779 error ("nonlocal label %qD is not first in a sequence "
5780 "of labels in bb %d", label, bb->index);
5781 err = true;
5784 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5786 error ("EH landing pad label %qD is not first in a sequence "
5787 "of labels in bb %d", label, bb->index);
5788 err = true;
5791 if (label_to_block (cfun, label) != bb)
5793 error ("label %qD to block does not match in bb %d",
5794 label, bb->index);
5795 err = true;
5798 if (decl_function_context (label) != current_function_decl)
5800 error ("label %qD has incorrect context in bb %d",
5801 label, bb->index);
5802 err = true;
5806 /* Verify that the body of basic block BB is free of control flow. */
5807 bool seen_nondebug_stmt = false;
5808 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5810 gimple *stmt = gsi_stmt (gsi);
5812 /* Do NOT disregard debug stmts after found_ctrl_stmt. */
5813 if (found_ctrl_stmt)
5815 error ("control flow in the middle of basic block %d",
5816 bb->index);
5817 err = true;
5820 if (stmt_ends_bb_p (stmt))
5821 found_ctrl_stmt = true;
5823 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5825 error ("label %qD in the middle of basic block %d",
5826 gimple_label_label (label_stmt), bb->index);
5827 err = true;
5830 /* Check that no statements appear between a returns_twice call
5831 and its associated abnormal edge. */
5832 if (gimple_code (stmt) == GIMPLE_CALL
5833 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
5835 bool misplaced = false;
5836 /* TM is an exception: its abnormal edges originate just after the
5837 call that starts a transaction, i.e. the call must end the BB. */
5838 if (gimple_call_builtin_p (stmt, BUILT_IN_TM_START))
5840 if (single_succ_p (bb)
5841 && bb_has_abnormal_pred (single_succ (bb))
5842 && !gsi_one_nondebug_before_end_p (gsi))
5844 error ("returns_twice call is not last in basic block "
5845 "%d", bb->index);
5846 misplaced = true;
5849 else
5851 if (seen_nondebug_stmt && bb_has_abnormal_pred (bb))
5853 error ("returns_twice call is not first in basic block "
5854 "%d", bb->index);
5855 misplaced = true;
5858 if (misplaced)
5860 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
5861 err = true;
5864 if (!is_gimple_debug (stmt))
5865 seen_nondebug_stmt = true;
5868 gsi = gsi_last_nondebug_bb (bb);
5869 if (gsi_end_p (gsi))
5870 continue;
5872 stmt = gsi_stmt (gsi);
5874 if (gimple_code (stmt) == GIMPLE_LABEL)
5875 continue;
5877 if (verify_eh_edges (stmt))
5878 err = true;
5880 if (is_ctrl_stmt (stmt))
5882 FOR_EACH_EDGE (e, ei, bb->succs)
5883 if (e->flags & EDGE_FALLTHRU)
5885 error ("fallthru edge after a control statement in bb %d",
5886 bb->index);
5887 err = true;
5891 if (gimple_code (stmt) != GIMPLE_COND)
5893 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5894 after anything but a GIMPLE_COND statement. */
5895 FOR_EACH_EDGE (e, ei, bb->succs)
5896 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5898 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5899 bb->index);
5900 err = true;
5904 switch (gimple_code (stmt))
5906 case GIMPLE_COND:
5908 edge true_edge;
5909 edge false_edge;
5911 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5913 if (!true_edge
5914 || !false_edge
5915 || !(true_edge->flags & EDGE_TRUE_VALUE)
5916 || !(false_edge->flags & EDGE_FALSE_VALUE)
5917 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5918 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5919 || EDGE_COUNT (bb->succs) >= 3)
5921 error ("wrong outgoing edge flags at end of bb %d",
5922 bb->index);
5923 err = true;
5926 break;
5928 case GIMPLE_GOTO:
5929 if (simple_goto_p (stmt))
5931 error ("explicit goto at end of bb %d", bb->index);
5932 err = true;
5934 else
5936 /* FIXME. We should double check that the labels in the
5937 destination blocks have their address taken. */
5938 FOR_EACH_EDGE (e, ei, bb->succs)
5939 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5940 | EDGE_FALSE_VALUE))
5941 || !(e->flags & EDGE_ABNORMAL))
5943 error ("wrong outgoing edge flags at end of bb %d",
5944 bb->index);
5945 err = true;
5948 break;
5950 case GIMPLE_CALL:
5951 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5952 break;
5953 /* fallthru */
5954 case GIMPLE_RETURN:
5955 if (!single_succ_p (bb)
5956 || (single_succ_edge (bb)->flags
5957 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5958 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5960 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5961 err = true;
5963 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5965 error ("return edge does not point to exit in bb %d",
5966 bb->index);
5967 err = true;
5969 break;
5971 case GIMPLE_SWITCH:
5973 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5974 tree prev;
5975 edge e;
5976 size_t i, n;
5978 n = gimple_switch_num_labels (switch_stmt);
5980 /* Mark all the destination basic blocks. */
5981 for (i = 0; i < n; ++i)
5983 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5984 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5985 label_bb->aux = (void *)1;
5988 /* Verify that the case labels are sorted. */
5989 prev = gimple_switch_label (switch_stmt, 0);
5990 for (i = 1; i < n; ++i)
5992 tree c = gimple_switch_label (switch_stmt, i);
5993 if (!CASE_LOW (c))
5995 error ("found default case not at the start of "
5996 "case vector");
5997 err = true;
5998 continue;
6000 if (CASE_LOW (prev)
6001 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
6003 error ("case labels not sorted: ");
6004 print_generic_expr (stderr, prev);
6005 fprintf (stderr," is greater than ");
6006 print_generic_expr (stderr, c);
6007 fprintf (stderr," but comes before it.\n");
6008 err = true;
6010 prev = c;
6012 /* VRP will remove the default case if it can prove it will
6013 never be executed. So do not verify there always exists
6014 a default case here. */
6016 FOR_EACH_EDGE (e, ei, bb->succs)
6018 if (!e->dest->aux)
6020 error ("extra outgoing edge %d->%d",
6021 bb->index, e->dest->index);
6022 err = true;
6025 e->dest->aux = (void *)2;
6026 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
6027 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6029 error ("wrong outgoing edge flags at end of bb %d",
6030 bb->index);
6031 err = true;
6035 /* Check that we have all of them. */
6036 for (i = 0; i < n; ++i)
6038 basic_block label_bb = gimple_switch_label_bb (cfun,
6039 switch_stmt, i);
6041 if (label_bb->aux != (void *)2)
6043 error ("missing edge %i->%i", bb->index, label_bb->index);
6044 err = true;
6048 FOR_EACH_EDGE (e, ei, bb->succs)
6049 e->dest->aux = (void *)0;
6051 break;
6053 case GIMPLE_EH_DISPATCH:
6054 if (verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt)))
6055 err = true;
6056 break;
6058 default:
6059 break;
6063 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6064 verify_dominators (CDI_DOMINATORS);
6066 return err;
6069 #if __GNUC__ >= 10
6070 # pragma GCC diagnostic pop
6071 #endif
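/* One flow invariant from above, shown on a hedged example: a block
   ending in

     if (x_1 > 0) goto <bb 3>; else goto <bb 4>;

   must have exactly two successor edges, one with EDGE_TRUE_VALUE
   and one with EDGE_FALSE_VALUE, and neither may be fallthru or
   abnormal.  */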
6073 /* Updates phi nodes after creating a forwarder block joined
6074 by edge FALLTHRU. */
6076 static void
6077 gimple_make_forwarder_block (edge fallthru)
6079 edge e;
6080 edge_iterator ei;
6081 basic_block dummy, bb;
6082 tree var;
6083 gphi_iterator gsi;
6084 bool forward_location_p;
6086 dummy = fallthru->src;
6087 bb = fallthru->dest;
6089 if (single_pred_p (bb))
6090 return;
6092 /* We can forward location info if we have only one predecessor. */
6093 forward_location_p = single_pred_p (dummy);
6095 /* If we redirected a branch we must create new PHI nodes at the
6096 start of BB. */
6097 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6099 gphi *phi, *new_phi;
6101 phi = gsi.phi ();
6102 var = gimple_phi_result (phi);
6103 new_phi = create_phi_node (var, bb);
6104 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
6105 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
6106 forward_location_p
6107 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
6110 /* Add the arguments we have stored on edges. */
6111 FOR_EACH_EDGE (e, ei, bb->preds)
6113 if (e == fallthru)
6114 continue;
6116 flush_pending_stmts (e);
6121 /* Return a non-special label at the head of basic block BB.
6122 Create one if it doesn't exist. */
6124 tree
6125 gimple_block_label (basic_block bb)
6127 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6128 bool first = true;
6129 tree label;
6130 glabel *stmt;
6132 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6134 stmt = dyn_cast <glabel *> (gsi_stmt (i));
6135 if (!stmt)
6136 break;
6137 label = gimple_label_label (stmt);
6138 if (!DECL_NONLOCAL (label))
6140 if (!first)
6141 gsi_move_before (&i, &s);
6142 return label;
6146 label = create_artificial_label (UNKNOWN_LOCATION);
6147 stmt = gimple_build_label (label);
6148 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6149 return label;
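/* Usage sketch (hypothetical basic block BB): a caller that needs a
   jump target, such as the GIMPLE_SWITCH redirection below, simply
   does

     tree label = gimple_block_label (bb);

   and may rely on the result being a LABEL_DECL that is first in BB
   and not DECL_NONLOCAL.  */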
6153 /* Attempt to perform edge redirection by replacing a possibly complex
6154 jump instruction by a goto or by removing the jump completely.
6155 This can apply only if all edges now point to the same block. The
6156 parameters and return values are equivalent to
6157 redirect_edge_and_branch. */
6159 static edge
6160 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6162 basic_block src = e->src;
6163 gimple_stmt_iterator i;
6164 gimple *stmt;
6166 /* We can replace or remove a complex jump only when we have exactly
6167 two edges. */
6168 if (EDGE_COUNT (src->succs) != 2
6169 /* Verify that all targets will be TARGET. Specifically, the
6170 edge that is not E must also go to TARGET. */
6171 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6172 return NULL;
6174 i = gsi_last_bb (src);
6175 if (gsi_end_p (i))
6176 return NULL;
6178 stmt = gsi_stmt (i);
6180 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6182 gsi_remove (&i, true);
6183 e = ssa_redirect_edge (e, target);
6184 e->flags = EDGE_FALLTHRU;
6185 return e;
6188 return NULL;
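/* Editor's note on the idiom above (illustrative): the expression
   EDGE_SUCC (src, EDGE_SUCC (src, 0) == e) selects "the other" successor;
   the inner comparison yields 1 exactly when E is successor 0, so the
   outer EDGE_SUCC indexes the remaining edge:

     edge other = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
     gcc_checking_assert (other != e);  */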
6192 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6193 edge representing the redirected branch. */
6195 static edge
6196 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6198 basic_block bb = e->src;
6199 gimple_stmt_iterator gsi;
6200 edge ret;
6201 gimple *stmt;
6203 if (e->flags & EDGE_ABNORMAL)
6204 return NULL;
6206 if (e->dest == dest)
6207 return NULL;
6209 if (e->flags & EDGE_EH)
6210 return redirect_eh_edge (e, dest);
6212 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6214 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6215 if (ret)
6216 return ret;
6219 gsi = gsi_last_nondebug_bb (bb);
6220 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6222 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6224 case GIMPLE_COND:
6225 /* For COND_EXPR, we only need to redirect the edge. */
6226 break;
6228 case GIMPLE_GOTO:
6229 /* No non-abnormal edges should lead from a non-simple goto, and
6230 simple ones should be represented implicitly. */
6231 gcc_unreachable ();
6233 case GIMPLE_SWITCH:
6235 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6236 tree label = gimple_block_label (dest);
6237 tree cases = get_cases_for_edge (e, switch_stmt);
6239 /* If we have a list of cases associated with E, then use it
6240 as it's a lot faster than walking the entire case vector. */
6241 if (cases)
6243 edge e2 = find_edge (e->src, dest);
6244 tree last, first;
6246 first = cases;
6247 while (cases)
6249 last = cases;
6250 CASE_LABEL (cases) = label;
6251 cases = CASE_CHAIN (cases);
6254 /* If there was already an edge in the CFG, then we need
6255 to move all the cases associated with E to E2. */
6256 if (e2)
6258 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6260 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6261 CASE_CHAIN (cases2) = first;
6263 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6265 else
6267 size_t i, n = gimple_switch_num_labels (switch_stmt);
6269 for (i = 0; i < n; i++)
6271 tree elt = gimple_switch_label (switch_stmt, i);
6272 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6273 CASE_LABEL (elt) = label;
6277 break;
6279 case GIMPLE_ASM:
6281 gasm *asm_stmt = as_a <gasm *> (stmt);
6282 int i, n = gimple_asm_nlabels (asm_stmt);
6283 tree label = NULL;
6285 for (i = 0; i < n; ++i)
6287 tree cons = gimple_asm_label_op (asm_stmt, i);
6288 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6290 if (!label)
6291 label = gimple_block_label (dest);
6292 TREE_VALUE (cons) = label;
6296 /* If we didn't find any label matching the former edge in the
6297 asm labels, we must be redirecting the fallthrough
6298 edge. */
6299 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6301 break;
6303 case GIMPLE_RETURN:
6304 gsi_remove (&gsi, true);
6305 e->flags |= EDGE_FALLTHRU;
6306 break;
6308 case GIMPLE_OMP_RETURN:
6309 case GIMPLE_OMP_CONTINUE:
6310 case GIMPLE_OMP_SECTIONS_SWITCH:
6311 case GIMPLE_OMP_FOR:
6312 /* The edges from OMP constructs can be simply redirected. */
6313 break;
6315 case GIMPLE_EH_DISPATCH:
6316 if (!(e->flags & EDGE_FALLTHRU))
6317 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6318 break;
6320 case GIMPLE_TRANSACTION:
6321 if (e->flags & EDGE_TM_ABORT)
6322 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6323 gimple_block_label (dest));
6324 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6325 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6326 gimple_block_label (dest));
6327 else
6328 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6329 gimple_block_label (dest));
6330 break;
6332 default:
6333 /* Otherwise it must be a fallthru edge, and we don't need to
6334 do anything besides redirecting it. */
6335 gcc_assert (e->flags & EDGE_FALLTHRU);
6336 break;
6339 /* Update/insert PHI nodes as necessary. */
6341 /* Now update the edges in the CFG. */
6342 e = ssa_redirect_edge (e, dest);
6344 return e;
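/* Editor's sketch of the usual caller pattern (compare the calls in
   gimple_duplicate_seme_region below): redirect first, then flush the
   PHI arguments that ssa_redirect_edge queued on the edge:

     edge r = redirect_edge_and_branch (e, dest);
     gcc_assert (r != NULL);
     flush_pending_stmts (e);  */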
6347 /* Returns true if it is possible to remove edge E by redirecting
6348 it to the destination of the other edge from E->src. */
6350 static bool
6351 gimple_can_remove_branch_p (const_edge e)
6353 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6354 return false;
6356 return true;
6359 /* Simple wrapper, as we can always redirect fallthru edges. */
6361 static basic_block
6362 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6364 e = gimple_redirect_edge_and_branch (e, dest);
6365 gcc_assert (e);
6367 return NULL;
6371 /* Splits basic block BB after statement STMT (but at least after the
6372 labels). If STMT is NULL, BB is split just after the labels. */
6374 static basic_block
6375 gimple_split_block (basic_block bb, void *stmt)
6377 gimple_stmt_iterator gsi;
6378 gimple_stmt_iterator gsi_tgt;
6379 gimple_seq list;
6380 basic_block new_bb;
6381 edge e;
6382 edge_iterator ei;
6384 new_bb = create_empty_bb (bb);
6386 /* Redirect the outgoing edges. */
6387 new_bb->succs = bb->succs;
6388 bb->succs = NULL;
6389 FOR_EACH_EDGE (e, ei, new_bb->succs)
6390 e->src = new_bb;
6392 /* Get a stmt iterator pointing to the first stmt to move. */
6393 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6394 gsi = gsi_after_labels (bb);
6395 else
6397 gsi = gsi_for_stmt ((gimple *) stmt);
6398 gsi_next (&gsi);
6401 /* Move everything from GSI to the new basic block. */
6402 if (gsi_end_p (gsi))
6403 return new_bb;
6405 /* Split the statement list - avoid re-creating new containers as this
6406 brings ugly quadratic memory consumption in the inliner.
6407 (We are still quadratic since we need to update stmt BB pointers,
6408 sadly.) */
6409 gsi_split_seq_before (&gsi, &list);
6410 set_bb_seq (new_bb, list);
6411 for (gsi_tgt = gsi_start (list);
6412 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6413 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6415 return new_bb;
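/* Illustrative use (editor's note): callers normally go through the
   generic split_block hook and take the destination of the returned
   edge, as gimple_split_block_before_cond_jump does below:

     basic_block new_bb = split_block (bb, split_point)->dest;  */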
6419 /* Moves basic block BB after block AFTER. */
6421 static bool
6422 gimple_move_block_after (basic_block bb, basic_block after)
6424 if (bb->prev_bb == after)
6425 return true;
6427 unlink_block (bb);
6428 link_block (bb, after);
6430 return true;
6434 /* Return TRUE if block BB has no executable statements, otherwise return
6435 FALSE. */
6437 static bool
6438 gimple_empty_block_p (basic_block bb)
6440 /* BB must have no executable statements. */
6441 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6442 if (phi_nodes (bb))
6443 return false;
6444 while (!gsi_end_p (gsi))
6446 gimple *stmt = gsi_stmt (gsi);
6447 if (is_gimple_debug (stmt))
6449 else if (gimple_code (stmt) == GIMPLE_NOP
6450 || gimple_code (stmt) == GIMPLE_PREDICT)
6452 else
6453 return false;
6454 gsi_next (&gsi);
6456 return true;
6460 /* Split a basic block if it ends with a conditional branch and if the
6461 other part of the block is not empty. */
6463 static basic_block
6464 gimple_split_block_before_cond_jump (basic_block bb)
6466 gimple *last, *split_point;
6467 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6468 if (gsi_end_p (gsi))
6469 return NULL;
6470 last = gsi_stmt (gsi);
6471 if (gimple_code (last) != GIMPLE_COND
6472 && gimple_code (last) != GIMPLE_SWITCH)
6473 return NULL;
6474 gsi_prev (&gsi);
6475 split_point = gsi_stmt (gsi);
6476 return split_block (bb, split_point)->dest;
6480 /* Return true if basic block BB can be duplicated. */
6482 static bool
6483 gimple_can_duplicate_bb_p (const_basic_block bb)
6485 gimple *last = last_nondebug_stmt (CONST_CAST_BB (bb));
6487 /* Do checks that can only fail for the last stmt, to minimize the work in the
6488 stmt loop. */
6489 if (last) {
6490 /* A transaction is a single entry multiple exit region. It
6491 must be duplicated in its entirety or not at all. */
6492 if (gimple_code (last) == GIMPLE_TRANSACTION)
6493 return false;
6495 /* An IFN_UNIQUE call must be duplicated as part of its group,
6496 or not at all. */
6497 if (is_gimple_call (last)
6498 && gimple_call_internal_p (last)
6499 && gimple_call_internal_unique_p (last))
6500 return false;
6502 /* Prohibit duplication of returns_twice calls; otherwise the associated
6503 abnormal edges would also need to be duplicated properly.
6504 A returns_twice call will always be the last statement. */
6505 if (is_gimple_call (last)
6506 && (gimple_call_flags (last) & ECF_RETURNS_TWICE))
6507 return false;
6510 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6511 !gsi_end_p (gsi); gsi_next (&gsi))
6513 gimple *g = gsi_stmt (gsi);
6515 /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6516 duplicated as part of its group, or not at all.
6517 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6518 group, so the same holds there. */
6519 if (is_gimple_call (g)
6520 && (gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6521 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6522 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6523 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6524 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6525 return false;
6528 return true;
6531 /* Create a duplicate of the basic block BB. NOTE: This does not
6532 preserve SSA form. */
6534 static basic_block
6535 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6537 basic_block new_bb;
6538 gimple_stmt_iterator gsi_tgt;
6540 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6542 /* Copy the PHI nodes. We ignore PHI node arguments here because
6543 the incoming edges have not been set up yet. */
6544 for (gphi_iterator gpi = gsi_start_phis (bb);
6545 !gsi_end_p (gpi);
6546 gsi_next (&gpi))
6548 gphi *phi, *copy;
6549 phi = gpi.phi ();
6550 copy = create_phi_node (NULL_TREE, new_bb);
6551 create_new_def_for (gimple_phi_result (phi), copy,
6552 gimple_phi_result_ptr (copy));
6553 gimple_set_uid (copy, gimple_uid (phi));
6556 gsi_tgt = gsi_start_bb (new_bb);
6557 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6558 !gsi_end_p (gsi);
6559 gsi_next (&gsi))
6561 def_operand_p def_p;
6562 ssa_op_iter op_iter;
6563 tree lhs;
6564 gimple *stmt, *copy;
6566 stmt = gsi_stmt (gsi);
6567 if (gimple_code (stmt) == GIMPLE_LABEL)
6568 continue;
6570 /* Don't duplicate label debug stmts. */
6571 if (gimple_debug_bind_p (stmt)
6572 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6573 == LABEL_DECL)
6574 continue;
6576 /* Create a new copy of STMT and duplicate STMT's virtual
6577 operands. */
6578 copy = gimple_copy (stmt);
6579 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6581 maybe_duplicate_eh_stmt (copy, stmt);
6582 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6584 /* When copying around a stmt writing into a local non-user
6585 aggregate, make sure it won't share stack slot with other
6586 vars. */
6587 lhs = gimple_get_lhs (stmt);
6588 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6590 tree base = get_base_address (lhs);
6591 if (base
6592 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6593 && DECL_IGNORED_P (base)
6594 && !TREE_STATIC (base)
6595 && !DECL_EXTERNAL (base)
6596 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6597 DECL_NONSHAREABLE (base) = 1;
6600 /* If requested remap dependence info of cliques brought in
6601 via inlining. */
6602 if (id)
6603 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6605 tree op = gimple_op (copy, i);
6606 if (!op)
6607 continue;
6608 if (TREE_CODE (op) == ADDR_EXPR
6609 || TREE_CODE (op) == WITH_SIZE_EXPR)
6610 op = TREE_OPERAND (op, 0);
6611 while (handled_component_p (op))
6612 op = TREE_OPERAND (op, 0);
6613 if ((TREE_CODE (op) == MEM_REF
6614 || TREE_CODE (op) == TARGET_MEM_REF)
6615 && MR_DEPENDENCE_CLIQUE (op) > 1
6616 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6618 if (!id->dependence_map)
6619 id->dependence_map = new hash_map<dependence_hash,
6620 unsigned short>;
6621 bool existed;
6622 unsigned short &newc = id->dependence_map->get_or_insert
6623 (MR_DEPENDENCE_CLIQUE (op), &existed);
6624 if (!existed)
6626 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6627 newc = get_new_clique (cfun);
6629 MR_DEPENDENCE_CLIQUE (op) = newc;
6633 /* Create new names for all the definitions created by COPY and
6634 add replacement mappings for each new name. */
6635 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6636 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6639 return new_bb;
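/* Editor's sketch (hedged): because SSA form is not preserved here, a
   caller is expected to wire up the copy's edges itself and then repair
   the PHI arguments, roughly:

     basic_block copy = duplicate_block (bb, e, after);
     add_phi_args_after_copy_bb (copy);

   duplicate_block is the generic cfghooks wrapper that dispatches to
   gimple_duplicate_bb; the exact driver depends on the caller.  */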
6642 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6644 static void
6645 add_phi_args_after_copy_edge (edge e_copy)
6647 basic_block bb, bb_copy = e_copy->src, dest;
6648 edge e;
6649 edge_iterator ei;
6650 gphi *phi, *phi_copy;
6651 tree def;
6652 gphi_iterator psi, psi_copy;
6654 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6655 return;
6657 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6659 if (e_copy->dest->flags & BB_DUPLICATED)
6660 dest = get_bb_original (e_copy->dest);
6661 else
6662 dest = e_copy->dest;
6664 e = find_edge (bb, dest);
6665 if (!e)
6667 /* During loop unrolling the target of the latch edge is copied.
6668 In this case we are not looking for the edge to DEST, but for
6669 the edge to the duplicated block whose original was DEST. */
6670 FOR_EACH_EDGE (e, ei, bb->succs)
6672 if ((e->dest->flags & BB_DUPLICATED)
6673 && get_bb_original (e->dest) == dest)
6674 break;
6677 gcc_assert (e != NULL);
6680 for (psi = gsi_start_phis (e->dest),
6681 psi_copy = gsi_start_phis (e_copy->dest);
6682 !gsi_end_p (psi);
6683 gsi_next (&psi), gsi_next (&psi_copy))
6685 phi = psi.phi ();
6686 phi_copy = psi_copy.phi ();
6687 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6688 add_phi_arg (phi_copy, def, e_copy,
6689 gimple_phi_arg_location_from_edge (phi, e));
6694 /* Basic block BB_COPY was created by code duplication. Add phi node
6695 arguments for edges going out of BB_COPY. The blocks that were
6696 duplicated have BB_DUPLICATED set. */
6698 void
6699 add_phi_args_after_copy_bb (basic_block bb_copy)
6701 edge e_copy;
6702 edge_iterator ei;
6704 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6706 add_phi_args_after_copy_edge (e_copy);
6710 /* Blocks in REGION_COPY array of length N_REGION were created by
6711 duplication of basic blocks. Add phi node arguments for edges
6712 going from these blocks. If E_COPY is not NULL, also add
6713 phi node arguments for its destination. */
6715 void
6716 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6717 edge e_copy)
6719 unsigned i;
6721 for (i = 0; i < n_region; i++)
6722 region_copy[i]->flags |= BB_DUPLICATED;
6724 for (i = 0; i < n_region; i++)
6725 add_phi_args_after_copy_bb (region_copy[i]);
6726 if (e_copy)
6727 add_phi_args_after_copy_edge (e_copy);
6729 for (i = 0; i < n_region; i++)
6730 region_copy[i]->flags &= ~BB_DUPLICATED;
6733 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6734 important exit edge EXIT. By important we mean that no SSA name defined
6735 inside the region is live over its other exit edges. All entry
6736 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6737 to the duplicate of the region. Dominance and loop information is
6738 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6739 UPDATE_DOMINANCE is false then we assume that the caller will update the
6740 dominance information after calling this function. The new basic
6741 blocks are stored to REGION_COPY in the same order as they had in REGION,
6742 provided that REGION_COPY is not NULL.
6743 The function returns false if it is unable to copy the region,
6744 true otherwise.
6746 It is the caller's responsibility to update the profile. */
6748 bool
6749 gimple_duplicate_seme_region (edge entry, edge exit,
6750 basic_block *region, unsigned n_region,
6751 basic_block *region_copy,
6752 bool update_dominance)
6754 unsigned i;
6755 bool free_region_copy = false, copying_header = false;
6756 class loop *loop = entry->dest->loop_father;
6757 edge exit_copy;
6758 edge redirected;
6760 if (!can_copy_bbs_p (region, n_region))
6761 return false;
6763 /* Some sanity checking. Note that we do not check for all possible
6764 misuses of the function, i.e. if you ask to copy something weird,
6765 it will work, but the state of the data structures probably will not
6766 be correct.
6767 for (i = 0; i < n_region; i++)
6769 /* We do not handle subloops, i.e. all the blocks must belong to the
6770 same loop. */
6771 if (region[i]->loop_father != loop)
6772 return false;
6774 if (region[i] != entry->dest
6775 && region[i] == loop->header)
6776 return false;
6779 /* In case the function is used for loop header copying (which is the primary
6780 use), ensure that EXIT and its copy will be new latch and entry edges. */
6781 if (loop->header == entry->dest)
6783 copying_header = true;
6785 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6786 return false;
6788 for (i = 0; i < n_region; i++)
6789 if (region[i] != exit->src
6790 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6791 return false;
6794 initialize_original_copy_tables ();
6796 if (copying_header)
6797 set_loop_copy (loop, loop_outer (loop));
6798 else
6799 set_loop_copy (loop, loop);
6801 if (!region_copy)
6803 region_copy = XNEWVEC (basic_block, n_region);
6804 free_region_copy = true;
6807 /* Record blocks outside the region that are dominated by something
6808 inside. */
6809 auto_vec<basic_block> doms;
6810 if (update_dominance)
6811 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6813 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6814 split_edge_bb_loc (entry), update_dominance);
6816 if (copying_header)
6818 loop->header = exit->dest;
6819 loop->latch = exit->src;
6822 /* Redirect the entry and add the phi node arguments. */
6823 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6824 gcc_assert (redirected != NULL);
6825 flush_pending_stmts (entry);
6827 /* Concerning updating of dominators: We must recount dominators
6828 for entry block and its copy. Anything that is outside of the
6829 region, but was dominated by something inside needs recounting as
6830 well. */
6831 if (update_dominance)
6833 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6834 doms.safe_push (get_bb_original (entry->dest));
6835 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6838 /* Add the other PHI node arguments. */
6839 add_phi_args_after_copy (region_copy, n_region, NULL);
6841 if (free_region_copy)
6842 free (region_copy);
6844 free_original_copy_tables ();
6845 return true;
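/* Editor's sketch of the primary use, loop-header copying (illustrative;
   the real driver lives in the loop-header-copying pass):

     edge entry = loop_preheader_edge (loop);
     edge exit = <the exit edge that becomes the new latch edge>;
     if (gimple_duplicate_seme_region (entry, exit, bbs, n_bbs,
                                       copied_bbs, true))
       <update the profile, as required by the comment above>

   The angle-bracketed parts are placeholders, not real APIs; selecting
   EXIT is the caller's own analysis.  */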
6848 /* Checks if BB is part of the region defined by N_REGION BBS. */
6849 static bool
6850 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6852 unsigned int n;
6854 for (n = 0; n < n_region; n++)
6856 if (bb == bbs[n])
6857 return true;
6859 return false;
6863 /* For each PHI in BB, copy the argument associated with SRC_E to TGT_E.
6864 The argument is assumed to exist; it just does not have a value yet. */
6866 void
6867 copy_phi_arg_into_existing_phi (edge src_e, edge tgt_e)
6869 int src_idx = src_e->dest_idx;
6870 int tgt_idx = tgt_e->dest_idx;
6872 /* Iterate over each PHI in e->dest. */
6873 for (gphi_iterator gsi = gsi_start_phis (src_e->dest),
6874 gsi2 = gsi_start_phis (tgt_e->dest);
6875 !gsi_end_p (gsi);
6876 gsi_next (&gsi), gsi_next (&gsi2))
6878 gphi *src_phi = gsi.phi ();
6879 gphi *dest_phi = gsi2.phi ();
6880 tree val = gimple_phi_arg_def (src_phi, src_idx);
6881 location_t locus = gimple_phi_arg_location (src_phi, src_idx);
6883 SET_PHI_ARG_DEF (dest_phi, tgt_idx, val);
6884 gimple_phi_arg_set_location (dest_phi, tgt_idx, locus);
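/* Illustrative use (editor's note), as in gimple_duplicate_sese_tail
   below, where an edge redirected out of the copied latch inherits the
   PHI arguments recorded for the copied exit:

     copy_phi_arg_into_existing_phi (nexits[0], e);  */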
6888 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6889 are stored to REGION_COPY in the same order in which they appear
6890 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6891 the region, EXIT an exit from it. The condition guarding EXIT
6892 is moved to ENTRY. Returns true if duplication succeeds, false
6893 otherwise.
6895 For example,

6897 some_code;
6898 if (cond)
       { }
6900 else
       { }

6903 is transformed to

6905 if (cond)
       {
6907     some_code;
       }
6910 else
       {
6912     some_code;
       }
6917 bool
6918 gimple_duplicate_sese_tail (edge entry, edge exit,
6919 basic_block *region, unsigned n_region,
6920 basic_block *region_copy)
6922 unsigned i;
6923 bool free_region_copy = false;
6924 class loop *loop = exit->dest->loop_father;
6925 class loop *orig_loop = entry->dest->loop_father;
6926 basic_block switch_bb, entry_bb, nentry_bb;
6927 profile_count total_count = profile_count::uninitialized (),
6928 exit_count = profile_count::uninitialized ();
6929 edge exits[2], nexits[2], e;
6930 gimple_stmt_iterator gsi;
6931 edge sorig, snew;
6932 basic_block exit_bb;
6933 class loop *target, *aloop, *cloop;
6935 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6936 exits[0] = exit;
6937 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6939 if (!can_copy_bbs_p (region, n_region))
6940 return false;
6942 initialize_original_copy_tables ();
6943 set_loop_copy (orig_loop, loop);
6945 target = loop;
6946 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6948 if (bb_part_of_region_p (aloop->header, region, n_region))
6950 cloop = duplicate_loop (aloop, target);
6951 duplicate_subloops (aloop, cloop);
6955 if (!region_copy)
6957 region_copy = XNEWVEC (basic_block, n_region);
6958 free_region_copy = true;
6961 gcc_assert (!need_ssa_update_p (cfun));
6963 /* Record blocks outside the region that are dominated by something
6964 inside. */
6965 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6966 n_region);
6968 total_count = exit->src->count;
6969 exit_count = exit->count ();
6970 /* Fix up corner cases, to avoid division by zero or creation of negative
6971 frequencies. */
6972 if (exit_count > total_count)
6973 exit_count = total_count;
6975 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6976 split_edge_bb_loc (exit), true);
6977 if (total_count.initialized_p () && exit_count.initialized_p ())
6979 scale_bbs_frequencies_profile_count (region, n_region,
6980 total_count - exit_count,
6981 total_count);
6982 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6983 total_count);
6986 /* Create the switch block, and put the exit condition to it. */
6987 entry_bb = entry->dest;
6988 nentry_bb = get_bb_copy (entry_bb);
6989 if (!*gsi_last_bb (entry->src)
6990 || !stmt_ends_bb_p (*gsi_last_bb (entry->src)))
6991 switch_bb = entry->src;
6992 else
6993 switch_bb = split_edge (entry);
6994 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6996 gcond *cond_stmt = as_a <gcond *> (*gsi_last_bb (exit->src));
6997 cond_stmt = as_a <gcond *> (gimple_copy (cond_stmt));
6999 gsi = gsi_last_bb (switch_bb);
7000 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
7002 sorig = single_succ_edge (switch_bb);
7003 sorig->flags = exits[1]->flags;
7004 sorig->probability = exits[1]->probability;
7005 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
7006 snew->probability = exits[0]->probability;
7009 /* Register the new edge from SWITCH_BB in loop exit lists. */
7010 rescan_loop_exit (snew, true, false);
7012 /* Add the PHI node arguments. */
7013 add_phi_args_after_copy (region_copy, n_region, snew);
7015 /* Get rid of now superfluous conditions and associated edges (and phi node
7016 arguments). */
7017 exit_bb = exit->dest;
7019 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
7020 PENDING_STMT (e) = NULL;
7022 /* The latch of ORIG_LOOP was copied, and so was the backedge
7023 to the original header. We redirect this backedge to EXIT_BB. */
7024 for (i = 0; i < n_region; i++)
7025 if (get_bb_original (region_copy[i]) == orig_loop->latch)
7027 gcc_assert (single_succ_edge (region_copy[i]));
7028 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
7029 PENDING_STMT (e) = NULL;
7030 copy_phi_arg_into_existing_phi (nexits[0], e);
7032 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
7033 PENDING_STMT (e) = NULL;
7035 /* Anything that is outside of the region, but was dominated by something
7036 inside needs to update dominance info. */
7037 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
7039 if (free_region_copy)
7040 free (region_copy);
7042 free_original_copy_tables ();
7043 return true;
7046 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
7047 adding blocks when the dominator traversal reaches EXIT. This
7048 function silently assumes that ENTRY strictly dominates EXIT. */
7050 void
7051 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
7052 vec<basic_block> *bbs_p)
7054 basic_block son;
7056 for (son = first_dom_son (CDI_DOMINATORS, entry);
7057 son;
7058 son = next_dom_son (CDI_DOMINATORS, son))
7060 bbs_p->safe_push (son);
7061 if (son != exit)
7062 gather_blocks_in_sese_region (son, exit, bbs_p);
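/* Illustrative use (editor's note), mirroring move_sese_region_to_fn
   below: ENTRY_BB must be pushed manually because the traversal only
   records the dominated sons:

     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */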
7066 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
7067 The duplicates are recorded in VARS_MAP. */
7069 static void
7070 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
7071 tree to_context)
7073 tree t = *tp, new_t;
7074 struct function *f = DECL_STRUCT_FUNCTION (to_context);
7076 if (DECL_CONTEXT (t) == to_context)
7077 return;
7079 bool existed;
7080 tree &loc = vars_map->get_or_insert (t, &existed);
7082 if (!existed)
7084 if (SSA_VAR_P (t))
7086 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
7087 add_local_decl (f, new_t);
7089 else
7091 gcc_assert (TREE_CODE (t) == CONST_DECL);
7092 new_t = copy_node (t);
7094 DECL_CONTEXT (new_t) = to_context;
7096 loc = new_t;
7098 else
7099 new_t = loc;
7101 *tp = new_t;
7105 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
7106 VARS_MAP maps old ssa names and var_decls to the new ones. */
7108 static tree
7109 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
7110 tree to_context)
7112 tree new_name;
7114 gcc_assert (!virtual_operand_p (name));
7116 tree *loc = vars_map->get (name);
7118 if (!loc)
7120 tree decl = SSA_NAME_VAR (name);
7121 if (decl)
7123 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
7124 replace_by_duplicate_decl (&decl, vars_map, to_context);
7125 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7126 decl, SSA_NAME_DEF_STMT (name));
7128 else
7129 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7130 name, SSA_NAME_DEF_STMT (name));
7132 /* Now that we've used the def stmt to define new_name, make sure it
7133 doesn't define name anymore. */
7134 SSA_NAME_DEF_STMT (name) = NULL;
7136 vars_map->put (name, new_name);
7138 else
7139 new_name = *loc;
7141 return new_name;
7144 struct move_stmt_d
7146 tree orig_block;
7147 tree new_block;
7148 tree from_context;
7149 tree to_context;
7150 hash_map<tree, tree> *vars_map;
7151 htab_t new_label_map;
7152 hash_map<void *, void *> *eh_map;
7153 bool remap_decls_p;
7156 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
7157 contained in *TP if it was ORIG_BLOCK previously and change the
7158 DECL_CONTEXT of every local variable referenced in *TP. */
7160 static tree
7161 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
7163 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7164 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7165 tree t = *tp;
7167 if (EXPR_P (t))
7169 tree block = TREE_BLOCK (t);
7170 if (block == NULL_TREE)
7172 else if (block == p->orig_block
7173 || p->orig_block == NULL_TREE)
7175 /* tree_node_can_be_shared says we can share invariant
7176 addresses but unshare_expr copies them anyways. Make sure
7177 to unshare before adjusting the block in place - we do not
7178 always see a copy here. */
7179 if (TREE_CODE (t) == ADDR_EXPR
7180 && is_gimple_min_invariant (t))
7181 *tp = t = unshare_expr (t);
7182 TREE_SET_BLOCK (t, p->new_block);
7184 else if (flag_checking)
7186 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7187 block = BLOCK_SUPERCONTEXT (block);
7188 gcc_assert (block == p->orig_block);
7191 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7193 if (TREE_CODE (t) == SSA_NAME)
7194 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7195 else if (TREE_CODE (t) == PARM_DECL
7196 && gimple_in_ssa_p (cfun))
7197 *tp = *(p->vars_map->get (t));
7198 else if (TREE_CODE (t) == LABEL_DECL)
7200 if (p->new_label_map)
7202 struct tree_map in, *out;
7203 in.base.from = t;
7204 out = (struct tree_map *)
7205 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7206 if (out)
7207 *tp = t = out->to;
7210 /* For FORCED_LABELs we can end up with references from other
7211 functions if some SESE regions are outlined. It is UB to
7212 jump in between them, but they could be used just for printing
7213 addresses etc. In that case, DECL_CONTEXT on the label should
7214 be the function containing the glabel stmt with that LABEL_DECL,
7215 rather than whichever function happened to reference the label most
7216 recently.
7217 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7218 DECL_CONTEXT (t) = p->to_context;
7220 else if (p->remap_decls_p)
7222 /* Replace T with its duplicate. T should no longer appear in the
7223 parent function, so this looks wasteful; however, it may appear
7224 in referenced_vars, and more importantly, as virtual operands of
7225 statements, and in alias lists of other variables. It would be
7226 quite difficult to expunge it from all those places. ??? It might
7227 suffice to do this for addressable variables. */
7228 if ((VAR_P (t) && !is_global_var (t))
7229 || TREE_CODE (t) == CONST_DECL)
7230 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7232 *walk_subtrees = 0;
7234 else if (TYPE_P (t))
7235 *walk_subtrees = 0;
7237 return NULL_TREE;
7240 /* Helper for move_stmt_r. Given an EH region number for the source
7241 function, map that to the duplicate EH region number in the dest. */
7243 static int
7244 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7246 eh_region old_r, new_r;
7248 old_r = get_eh_region_from_number (old_nr);
7249 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7251 return new_r->index;
7254 /* Similar, but operate on INTEGER_CSTs. */
7256 static tree
7257 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7259 int old_nr, new_nr;
7261 old_nr = tree_to_shwi (old_t_nr);
7262 new_nr = move_stmt_eh_region_nr (old_nr, p);
7264 return build_int_cst (integer_type_node, new_nr);
7267 /* Like move_stmt_op, but for gimple statements.
7269 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7270 contained in the current statement in *GSI_P and change the
7271 DECL_CONTEXT of every local variable referenced in the current
7272 statement. */
7274 static tree
7275 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7276 struct walk_stmt_info *wi)
7278 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7279 gimple *stmt = gsi_stmt (*gsi_p);
7280 tree block = gimple_block (stmt);
7282 if (block == p->orig_block
7283 || (p->orig_block == NULL_TREE
7284 && block != NULL_TREE))
7285 gimple_set_block (stmt, p->new_block);
7287 switch (gimple_code (stmt))
7289 case GIMPLE_CALL:
7290 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7292 tree r, fndecl = gimple_call_fndecl (stmt);
7293 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7294 switch (DECL_FUNCTION_CODE (fndecl))
7296 case BUILT_IN_EH_COPY_VALUES:
7297 r = gimple_call_arg (stmt, 1);
7298 r = move_stmt_eh_region_tree_nr (r, p);
7299 gimple_call_set_arg (stmt, 1, r);
7300 /* FALLTHRU */
7302 case BUILT_IN_EH_POINTER:
7303 case BUILT_IN_EH_FILTER:
7304 r = gimple_call_arg (stmt, 0);
7305 r = move_stmt_eh_region_tree_nr (r, p);
7306 gimple_call_set_arg (stmt, 0, r);
7307 break;
7309 default:
7310 break;
7313 break;
7315 case GIMPLE_RESX:
7317 gresx *resx_stmt = as_a <gresx *> (stmt);
7318 int r = gimple_resx_region (resx_stmt);
7319 r = move_stmt_eh_region_nr (r, p);
7320 gimple_resx_set_region (resx_stmt, r);
7322 break;
7324 case GIMPLE_EH_DISPATCH:
7326 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7327 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7328 r = move_stmt_eh_region_nr (r, p);
7329 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7331 break;
7333 case GIMPLE_OMP_RETURN:
7334 case GIMPLE_OMP_CONTINUE:
7335 break;
7337 case GIMPLE_LABEL:
7339 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7340 so that such labels can be referenced from other regions.
7341 Make sure to update it when seeing a GIMPLE_LABEL, though,
7342 since that statement is the owner of the label.
7343 walk_gimple_op (stmt, move_stmt_op, wi);
7344 *handled_ops_p = true;
7345 tree label = gimple_label_label (as_a <glabel *> (stmt));
7346 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7347 DECL_CONTEXT (label) = p->to_context;
7349 break;
7351 default:
7352 if (is_gimple_omp (stmt))
7354 /* Do not remap variables inside OMP directives. Variables
7355 referenced in clauses and directive header belong to the
7356 parent function and should not be moved into the child
7357 function. */
7358 bool save_remap_decls_p = p->remap_decls_p;
7359 p->remap_decls_p = false;
7360 *handled_ops_p = true;
7362 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7363 move_stmt_op, wi);
7365 p->remap_decls_p = save_remap_decls_p;
7367 break;
7370 return NULL_TREE;
7373 /* Move basic block BB from function CFUN to function DEST_FN. The
7374 block is moved out of the original linked list and placed after
7375 block AFTER in the new list. Also, the block is removed from the
7376 original array of blocks and placed in DEST_FN's array of blocks.
7377 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7378 updated to reflect the moved edges.
7380 The local variables are remapped to new instances, VARS_MAP is used
7381 to record the mapping. */
7383 static void
7384 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7385 basic_block after, bool update_edge_count_p,
7386 struct move_stmt_d *d)
7388 struct control_flow_graph *cfg;
7389 edge_iterator ei;
7390 edge e;
7391 gimple_stmt_iterator si;
7392 unsigned old_len;
7394 /* Remove BB from dominance structures. */
7395 delete_from_dominance_info (CDI_DOMINATORS, bb);
7397 /* Move BB from its current loop to the copy in the new function. */
7398 if (current_loops)
7400 class loop *new_loop = (class loop *)bb->loop_father->aux;
7401 if (new_loop)
7402 bb->loop_father = new_loop;
7405 /* Link BB to the new linked list. */
7406 move_block_after (bb, after);
7408 /* Update the edge count in the corresponding flowgraphs. */
7409 if (update_edge_count_p)
7410 FOR_EACH_EDGE (e, ei, bb->succs)
7412 cfun->cfg->x_n_edges--;
7413 dest_cfun->cfg->x_n_edges++;
7416 /* Remove BB from the original basic block array. */
7417 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7418 cfun->cfg->x_n_basic_blocks--;
7420 /* Grow DEST_CFUN's basic block array if needed. */
7421 cfg = dest_cfun->cfg;
7422 cfg->x_n_basic_blocks++;
7423 if (bb->index >= cfg->x_last_basic_block)
7424 cfg->x_last_basic_block = bb->index + 1;
7426 old_len = vec_safe_length (cfg->x_basic_block_info);
7427 if ((unsigned) cfg->x_last_basic_block >= old_len)
7428 vec_safe_grow_cleared (cfg->x_basic_block_info,
7429 cfg->x_last_basic_block + 1);
7431 (*cfg->x_basic_block_info)[bb->index] = bb;
7433 /* Remap the variables in phi nodes. */
7434 for (gphi_iterator psi = gsi_start_phis (bb);
7435 !gsi_end_p (psi); )
7437 gphi *phi = psi.phi ();
7438 use_operand_p use;
7439 tree op = PHI_RESULT (phi);
7440 ssa_op_iter oi;
7441 unsigned i;
7443 if (virtual_operand_p (op))
7445 /* Remove the phi nodes for virtual operands (alias analysis will be
7446 run for the new function, anyway). But replace all uses that
7447 might be outside of the region we move. */
7448 use_operand_p use_p;
7449 imm_use_iterator iter;
7450 gimple *use_stmt;
7451 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7452 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7453 SET_USE (use_p, SSA_NAME_VAR (op));
7454 remove_phi_node (&psi, true);
7455 continue;
7458 SET_PHI_RESULT (phi,
7459 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7460 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7462 op = USE_FROM_PTR (use);
7463 if (TREE_CODE (op) == SSA_NAME)
7464 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7467 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7469 location_t locus = gimple_phi_arg_location (phi, i);
7470 tree block = LOCATION_BLOCK (locus);
7472 if (locus == UNKNOWN_LOCATION)
7473 continue;
7474 if (d->orig_block == NULL_TREE || block == d->orig_block)
7476 locus = set_block (locus, d->new_block);
7477 gimple_phi_arg_set_location (phi, i, locus);
7481 gsi_next (&psi);
7484 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7486 gimple *stmt = gsi_stmt (si);
7487 struct walk_stmt_info wi;
7489 memset (&wi, 0, sizeof (wi));
7490 wi.info = d;
7491 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7493 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7495 tree label = gimple_label_label (label_stmt);
7496 int uid = LABEL_DECL_UID (label);
7498 gcc_assert (uid > -1);
7500 old_len = vec_safe_length (cfg->x_label_to_block_map);
7501 if (old_len <= (unsigned) uid)
7502 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7504 (*cfg->x_label_to_block_map)[uid] = bb;
7505 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7507 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7509 if (uid >= dest_cfun->cfg->last_label_uid)
7510 dest_cfun->cfg->last_label_uid = uid + 1;
7513 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7514 remove_stmt_from_eh_lp_fn (cfun, stmt);
7516 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7517 gimple_remove_stmt_histograms (cfun, stmt);
7519 /* We cannot leave any operands allocated from the operand caches of
7520 the current function. */
7521 free_stmt_operands (cfun, stmt);
7522 push_cfun (dest_cfun);
7523 update_stmt (stmt);
7524 if (is_gimple_call (stmt))
7525 notice_special_calls (as_a <gcall *> (stmt));
7526 pop_cfun ();
7529 FOR_EACH_EDGE (e, ei, bb->succs)
7530 if (e->goto_locus != UNKNOWN_LOCATION)
7532 tree block = LOCATION_BLOCK (e->goto_locus);
7533 if (d->orig_block == NULL_TREE
7534 || block == d->orig_block)
7535 e->goto_locus = set_block (e->goto_locus, d->new_block);
7539 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7540 the outermost EH region. Use REGION as the incoming base EH region.
7541 If there is no single outermost region, return NULL and set *ALL to
7542 true. */
7544 static eh_region
7545 find_outermost_region_in_block (struct function *src_cfun,
7546 basic_block bb, eh_region region,
7547 bool *all)
7549 gimple_stmt_iterator si;
7551 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7553 gimple *stmt = gsi_stmt (si);
7554 eh_region stmt_region;
7555 int lp_nr;
7557 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7558 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7559 if (stmt_region)
7561 if (region == NULL)
7562 region = stmt_region;
7563 else if (stmt_region != region)
7565 region = eh_region_outermost (src_cfun, stmt_region, region);
7566 if (region == NULL)
7568 *all = true;
7569 return NULL;
7575 return region;
7578 static tree
7579 new_label_mapper (tree decl, void *data)
7581 htab_t hash = (htab_t) data;
7582 struct tree_map *m;
7583 void **slot;
7585 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7587 m = XNEW (struct tree_map);
7588 m->hash = DECL_UID (decl);
7589 m->base.from = decl;
7590 m->to = create_artificial_label (UNKNOWN_LOCATION);
7591 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7592 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7593 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7595 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7596 gcc_assert (*slot == NULL);
7598 *slot = m;
7600 return m->to;
7603 /* Tree walker to replace the decls used inside value expressions by
7604 duplicates. */
7606 static tree
7607 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7609 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7611 switch (TREE_CODE (*tp))
7613 case VAR_DECL:
7614 case PARM_DECL:
7615 case RESULT_DECL:
7616 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7617 break;
7618 default:
7619 break;
7622 if (IS_TYPE_OR_DECL_P (*tp))
7623 *walk_subtrees = false;
7625 return NULL;
7628 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7629 subblocks. */
7631 static void
7632 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7633 tree to_context)
7635 tree *tp, t;
7637 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7639 t = *tp;
7640 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7641 continue;
7642 replace_by_duplicate_decl (&t, vars_map, to_context);
7643 if (t != *tp)
7645 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7647 tree x = DECL_VALUE_EXPR (*tp);
7648 struct replace_decls_d rd = { vars_map, to_context };
7649 unshare_expr (x);
7650 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7651 SET_DECL_VALUE_EXPR (t, x);
7652 DECL_HAS_VALUE_EXPR_P (t) = 1;
7654 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7655 *tp = t;
7659 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7660 replace_block_vars_by_duplicates (block, vars_map, to_context);
7663 /* Fix up the loop arrays and numbers after moving LOOP and its subloops
7664 from FN1 to FN2. */
7666 static void
7667 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7668 class loop *loop)
7670 /* Discard it from the old loop array. */
7671 (*get_loops (fn1))[loop->num] = NULL;
7673 /* Place it in the new loop array, assigning it a new number. */
7674 loop->num = number_of_loops (fn2);
7675 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7677 /* Recurse to children. */
7678 for (loop = loop->inner; loop; loop = loop->next)
7679 fixup_loop_arrays_after_move (fn1, fn2, loop);
7682 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7683 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7685 DEBUG_FUNCTION void
7686 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7688 basic_block bb;
7689 edge_iterator ei;
7690 edge e;
7691 bitmap bbs = BITMAP_ALLOC (NULL);
7692 int i;
7694 gcc_assert (entry != NULL);
7695 gcc_assert (entry != exit);
7696 gcc_assert (bbs_p != NULL);
7698 gcc_assert (bbs_p->length () > 0);
7700 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7701 bitmap_set_bit (bbs, bb->index);
7703 gcc_assert (bitmap_bit_p (bbs, entry->index));
7704 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7706 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7708 if (bb == entry)
7710 gcc_assert (single_pred_p (entry));
7711 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7713 else
7714 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7716 e = ei_edge (ei);
7717 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7720 if (bb == exit)
7722 gcc_assert (single_succ_p (exit));
7723 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7725 else
7726 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7728 e = ei_edge (ei);
7729 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7733 BITMAP_FREE (bbs);
7736 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7738 bool
7739 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7741 bitmap release_names = (bitmap)data;
7743 if (TREE_CODE (from) != SSA_NAME)
7744 return true;
7746 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7747 return true;
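/* Illustrative use (editor's note), as done near the end of
   move_sese_region_to_fn below:

     bitmap release_names = BITMAP_ALLOC (NULL);
     vars_map.traverse<void *, gather_ssa_name_hash_map_from>
       (release_names);  */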
7750 /* Return LOOP_DIST_ALIAS call if present in BB. */
7752 static gimple *
7753 find_loop_dist_alias (basic_block bb)
7755 gimple_stmt_iterator gsi = gsi_last_bb (bb);
7756 if (!safe_is_a <gcond *> (*gsi))
7757 return NULL;
7759 gsi_prev (&gsi);
7760 if (gsi_end_p (gsi))
7761 return NULL;
7763 gimple *g = gsi_stmt (gsi);
7764 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7765 return g;
7766 return NULL;
7769 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7770 to VALUE and update any immediate uses of its LHS. */
7772 void
7773 fold_loop_internal_call (gimple *g, tree value)
7775 tree lhs = gimple_call_lhs (g);
7776 use_operand_p use_p;
7777 imm_use_iterator iter;
7778 gimple *use_stmt;
7779 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7781 replace_call_with_value (&gsi, value);
7782 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7784 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7785 SET_USE (use_p, value);
7786 update_stmt (use_stmt);
7787 /* If we turn the conditional into a constant, scale profile counts.
7788 We know that the conditional was created by loop distribution
7789 and all basic blocks dominated by the taken edge are part of
7790 the distributed loop. */
7791 if (gimple_code (use_stmt) == GIMPLE_COND)
7793 edge true_edge, false_edge;
7794 extract_true_false_edges_from_block (gimple_bb (use_stmt),
7795 &true_edge, &false_edge);
7796 edge taken_edge = NULL, other_edge = NULL;
7797 if (gimple_cond_true_p (as_a <gcond *>(use_stmt)))
7799 taken_edge = true_edge;
7800 other_edge = false_edge;
7802 else if (gimple_cond_false_p (as_a <gcond *>(use_stmt)))
7804 taken_edge = false_edge;
7805 other_edge = true_edge;
7807 if (taken_edge
7808 && !(taken_edge->probability == profile_probability::always ()))
7810 profile_count old_count = taken_edge->count ();
7811 profile_count new_count = taken_edge->src->count;
7812 taken_edge->probability = profile_probability::always ();
7813 other_edge->probability = profile_probability::never ();
7814 /* If we have multiple predecessors, we can't use the dominance
7815 test. This should not happen as the guarded code should
7816 start with a pre-header. */
7817 gcc_assert (single_pred_edge (taken_edge->dest));
7818 if (old_count.nonzero_p ())
7820 taken_edge->dest->count
7821 = taken_edge->dest->count.apply_scale (new_count,
7822 old_count);
7823 scale_strictly_dominated_blocks (taken_edge->dest,
7824 new_count, old_count);
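/* Illustrative use (editor's note): when one loop of a distributed pair
   does not survive the region move below, the IFN_LOOP_DIST_ALIAS guard
   is folded to its second argument:

     fold_loop_internal_call (g, gimple_call_arg (g, 1));  */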
7831 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7832 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7833 single basic block in the original CFG and the new basic block is
7834 returned. DEST_CFUN must not have a CFG yet.
7836 Note that the region need not be a pure SESE region. Blocks inside
7837 the region may contain calls to abort/exit. The only restriction
7838 is that ENTRY_BB should be the only entry point and it must
7839 dominate EXIT_BB.
7841 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7842 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7843 to the new function.
7845 All local variables referenced in the region are assumed to be in
7846 the corresponding BLOCK_VARS and unexpanded variable lists
7847 associated with DEST_CFUN.
7849 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7850 reimplement move_sese_region_to_fn by duplicating the region rather than
7851 moving it. */
7853 basic_block
7854 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7855 basic_block exit_bb, tree orig_block)
7857 vec<basic_block> bbs;
7858 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7859 basic_block after, bb, *entry_pred, *exit_succ, abb;
7860 struct function *saved_cfun = cfun;
7861 int *entry_flag, *exit_flag;
7862 profile_probability *entry_prob, *exit_prob;
7863 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7864 edge e;
7865 edge_iterator ei;
7866 htab_t new_label_map;
7867 hash_map<void *, void *> *eh_map;
7868 class loop *loop = entry_bb->loop_father;
7869 class loop *loop0 = get_loop (saved_cfun, 0);
7870 struct move_stmt_d d;
7872 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7873 region. */
7874 gcc_assert (entry_bb != exit_bb
7875 && (!exit_bb
7876 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7878 /* Collect all the blocks in the region. Manually add ENTRY_BB
7879 because it won't be added by dfs_enumerate_from. */
7880 bbs.create (0);
7881 bbs.safe_push (entry_bb);
7882 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7884 if (flag_checking)
7885 verify_sese (entry_bb, exit_bb, &bbs);
7887 /* The blocks that used to be dominated by something in BBS will now be
7888 dominated by the new block. */
7889 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7890 bbs.address (),
7891 bbs.length ());
7893 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7894 the predecessor edges to ENTRY_BB and the successor edges to
7895 EXIT_BB so that we can re-attach them to the new basic block that
7896 will replace the region. */
7897 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7898 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7899 entry_flag = XNEWVEC (int, num_entry_edges);
7900 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7901 i = 0;
7902 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7904 entry_prob[i] = e->probability;
7905 entry_flag[i] = e->flags;
7906 entry_pred[i++] = e->src;
7907 remove_edge (e);
7910 if (exit_bb)
7912 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7913 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7914 exit_flag = XNEWVEC (int, num_exit_edges);
7915 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7916 i = 0;
7917 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7919 exit_prob[i] = e->probability;
7920 exit_flag[i] = e->flags;
7921 exit_succ[i++] = e->dest;
7922 remove_edge (e);
7925 else
7927 num_exit_edges = 0;
7928 exit_succ = NULL;
7929 exit_flag = NULL;
7930 exit_prob = NULL;
7933 /* Switch context to the child function to initialize DEST_FN's CFG. */
7934 gcc_assert (dest_cfun->cfg == NULL);
7935 push_cfun (dest_cfun);
7937 init_empty_tree_cfg ();
7939 /* Initialize EH information for the new function. */
7940 eh_map = NULL;
7941 new_label_map = NULL;
7942 if (saved_cfun->eh)
7944 eh_region region = NULL;
7945 bool all = false;
7947 FOR_EACH_VEC_ELT (bbs, i, bb)
7949 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7950 if (all)
7951 break;
7954 init_eh_for_function ();
7955 if (region != NULL || all)
7957 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7958 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7959 new_label_mapper, new_label_map);
7963 /* Initialize an empty loop tree. */
7964 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7965 init_loops_structure (dest_cfun, loops, 1);
7966 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7967 set_loops_for_fn (dest_cfun, loops);
7969 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7971 /* Move the outlined loop tree part. */
7972 num_nodes = bbs.length ();
7973 FOR_EACH_VEC_ELT (bbs, i, bb)
7975 if (bb->loop_father->header == bb)
7977 class loop *this_loop = bb->loop_father;
7978 /* Avoid the need to remap SSA names used in nb_iterations. */
7979 free_numbers_of_iterations_estimates (this_loop);
7980 class loop *outer = loop_outer (this_loop);
7981 if (outer == loop
7982 /* If the SESE region contains some bbs ending with
7983 a noreturn call, those are considered to belong
7984 to the outermost loop in saved_cfun, rather than
7985 the entry_bb's loop_father. */
7986 || outer == loop0)
7988 if (outer != loop)
7989 num_nodes -= this_loop->num_nodes;
7990 flow_loop_tree_node_remove (bb->loop_father);
7991 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7992 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7995 else if (bb->loop_father == loop0 && loop0 != loop)
7996 num_nodes--;
7998 /* Remove loop exits from the outlined region. */
7999 if (loops_for_fn (saved_cfun)->exits)
8000 FOR_EACH_EDGE (e, ei, bb->succs)
8002 struct loops *l = loops_for_fn (saved_cfun);
8003 loop_exit **slot
8004 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
8005 NO_INSERT);
8006 if (slot)
8007 l->exits->clear_slot (slot);
8011 /* Adjust the number of blocks in the tree root of the outlined part. */
8012 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
8014 /* Setup a mapping to be used by move_block_to_fn. */
8015 loop->aux = current_loops->tree_root;
8016 loop0->aux = current_loops->tree_root;
8018 /* Fix up orig_loop_num. If the block referenced in it has been moved
8019 to dest_cfun, update orig_loop_num field, otherwise clear it. */
8020 signed char *moved_orig_loop_num = NULL;
8021 for (auto dloop : loops_list (dest_cfun, 0))
8022 if (dloop->orig_loop_num)
8024 if (moved_orig_loop_num == NULL)
8025 moved_orig_loop_num
8026 = XCNEWVEC (signed char, vec_safe_length (larray));
8027 if ((*larray)[dloop->orig_loop_num] != NULL
8028 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
8030 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
8031 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
8032 moved_orig_loop_num[dloop->orig_loop_num]++;
8033 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
8035 else
8037 moved_orig_loop_num[dloop->orig_loop_num] = -1;
8038 dloop->orig_loop_num = 0;
8041 pop_cfun ();
8043 if (moved_orig_loop_num)
8045 FOR_EACH_VEC_ELT (bbs, i, bb)
8047 gimple *g = find_loop_dist_alias (bb);
8048 if (g == NULL)
8049 continue;
8051 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
8052 gcc_assert (orig_loop_num
8053 && (unsigned) orig_loop_num < vec_safe_length (larray));
8054 if (moved_orig_loop_num[orig_loop_num] == 2)
8056 /* If we have moved both loops with this orig_loop_num into
8057 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
8058 too, update the first argument. */
8059 gcc_assert ((*larray)[orig_loop_num] != NULL
8060 && (get_loop (saved_cfun, orig_loop_num) == NULL));
8061 tree t = build_int_cst (integer_type_node,
8062 (*larray)[orig_loop_num]->num);
8063 gimple_call_set_arg (g, 0, t);
8064 update_stmt (g);
8065 /* Make sure the following loop will not update it. */
8066 moved_orig_loop_num[orig_loop_num] = 0;
8068 else
8069 /* Otherwise at least one of the loops stayed in saved_cfun.
8070 Remove the LOOP_DIST_ALIAS call. */
8071 fold_loop_internal_call (g, gimple_call_arg (g, 1));
8073 FOR_EACH_BB_FN (bb, saved_cfun)
8075 gimple *g = find_loop_dist_alias (bb);
8076 if (g == NULL)
8077 continue;
8078 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
8079 gcc_assert (orig_loop_num
8080 && (unsigned) orig_loop_num < vec_safe_length (larray));
8081 if (moved_orig_loop_num[orig_loop_num])
8082 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
8083 of the corresponding loops was moved, remove it. */
8084 fold_loop_internal_call (g, gimple_call_arg (g, 1));
8086 XDELETEVEC (moved_orig_loop_num);
8088 ggc_free (larray);
8090 /* Move blocks from BBS into DEST_CFUN. */
8091 gcc_assert (bbs.length () >= 2);
8092 after = dest_cfun->cfg->x_entry_block_ptr;
8093 hash_map<tree, tree> vars_map;
8095 memset (&d, 0, sizeof (d));
8096 d.orig_block = orig_block;
8097 d.new_block = DECL_INITIAL (dest_cfun->decl);
8098 d.from_context = cfun->decl;
8099 d.to_context = dest_cfun->decl;
8100 d.vars_map = &vars_map;
8101 d.new_label_map = new_label_map;
8102 d.eh_map = eh_map;
8103 d.remap_decls_p = true;
8105 if (gimple_in_ssa_p (cfun))
8106 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
8108 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
8109 set_ssa_default_def (dest_cfun, arg, narg);
8110 vars_map.put (arg, narg);
8113 FOR_EACH_VEC_ELT (bbs, i, bb)
8115 /* No need to update edge counts on the last block. It has
8116 already been updated earlier when we detached the region from
8117 the original CFG. */
8118 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
8119 after = bb;
8122 /* Adjust the maximum clique used. */
8123 dest_cfun->last_clique = saved_cfun->last_clique;
8125 loop->aux = NULL;
8126 loop0->aux = NULL;
8127 /* Loop sizes are no longer correct, fix them up. */
8128 loop->num_nodes -= num_nodes;
8129 for (class loop *outer = loop_outer (loop);
8130 outer; outer = loop_outer (outer))
8131 outer->num_nodes -= num_nodes;
8132 loop0->num_nodes -= bbs.length () - num_nodes;
8134 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
8136 class loop *aloop;
8137 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
8138 if (aloop != NULL)
8140 if (aloop->simduid)
8142 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
8143 d.to_context);
8144 dest_cfun->has_simduid_loops = true;
8146 if (aloop->force_vectorize)
8147 dest_cfun->has_force_vectorize_loops = true;
8151 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8152 if (orig_block)
8154 tree block;
8155 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8156 == NULL_TREE);
8157 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8158 = BLOCK_SUBBLOCKS (orig_block);
8159 for (block = BLOCK_SUBBLOCKS (orig_block);
8160 block; block = BLOCK_CHAIN (block))
8161 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
8162 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
8165 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
8166 &vars_map, dest_cfun->decl);
8168 if (new_label_map)
8169 htab_delete (new_label_map);
8170 if (eh_map)
8171 delete eh_map;
8173 /* We need to release ssa-names in a defined order, so first find them,
8174 and then iterate in ascending version order. */
8175 bitmap release_names = BITMAP_ALLOC (NULL);
8176 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
8177 bitmap_iterator bi;
8178 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
8179 release_ssa_name (ssa_name (i));
8180 BITMAP_FREE (release_names);
8182 /* Rewire the entry and exit blocks. The successor to the entry
8183 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8184 the child function. Similarly, the predecessor of DEST_FN's
8185 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8186 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8187 various CFG manipulation functions get to the right CFG.
8189 FIXME, this is silly. The CFG ought to become a parameter to
8190 these helpers. */
8191 push_cfun (dest_cfun);
8192 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
8193 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
8194 if (exit_bb)
8196 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
8197 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
8199 else
8200 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
8201 pop_cfun ();
8203 /* Back in the original function, the SESE region has disappeared,
8204 create a new basic block in its place. */
8205 bb = create_empty_bb (entry_pred[0]);
8206 if (current_loops)
8207 add_bb_to_loop (bb, loop);
8208 profile_count count = profile_count::zero ();
8209 for (i = 0; i < num_entry_edges; i++)
8211 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8212 e->probability = entry_prob[i];
8213 count += e->count ();
8215 bb->count = count;
8217 for (i = 0; i < num_exit_edges; i++)
8219 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8220 e->probability = exit_prob[i];
8223 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8224 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8225 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8227 if (exit_bb)
8229 free (exit_prob);
8230 free (exit_flag);
8231 free (exit_succ);
8233 free (entry_prob);
8234 free (entry_flag);
8235 free (entry_pred);
8236 bbs.release ();
8238 return bb;
8241 /* Dump default def DEF to file FILE using FLAGS and indentation
8242 SPC. */
8244 static void
8245 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8247 for (int i = 0; i < spc; ++i)
8248 fprintf (file, " ");
8249 dump_ssaname_info_to_file (file, def, spc);
8251 print_generic_expr (file, TREE_TYPE (def), flags);
8252 fprintf (file, " ");
8253 print_generic_expr (file, def, flags);
8254 fprintf (file, " = ");
8255 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8256 fprintf (file, ";\n");
8259 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8261 static void
8262 print_no_sanitize_attr_value (FILE *file, tree value)
8264 unsigned int flags = tree_to_uhwi (value);
8265 bool first = true;
8266 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8268 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8270 if (!first)
8271 fprintf (file, " | ");
8272 fprintf (file, "%s", sanitizer_opts[i].name);
8273 first = false;
8278 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h) */
8281 void
8282 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8284 tree arg, var, old_current_fndecl = current_function_decl;
8285 struct function *dsf;
8286 bool ignore_topmost_bind = false, any_var = false;
8287 basic_block bb;
8288 tree chain;
8289 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8290 && decl_is_tm_clone (fndecl));
8291 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8293 tree fntype = TREE_TYPE (fndecl);
8294 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
8296 for (int i = 0; i != 2; ++i)
8298 if (!attrs[i])
8299 continue;
8301 fprintf (file, "__attribute__((");
8303 bool first = true;
8304 tree chain;
8305 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8307 if (!first)
8308 fprintf (file, ", ");
8310 tree name = get_attribute_name (chain);
8311 print_generic_expr (file, name, dump_flags);
8312 if (TREE_VALUE (chain) != NULL_TREE)
8314 fprintf (file, " (");
8316 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8317 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8318 else if (!strcmp (IDENTIFIER_POINTER (name),
8319 "omp declare variant base"))
8321 tree a = TREE_VALUE (chain);
8322 print_generic_expr (file, TREE_PURPOSE (a), dump_flags);
8323 fprintf (file, " match ");
8324 print_omp_context_selector (file, TREE_VALUE (a),
8325 dump_flags);
8327 else
8328 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8329 fprintf (file, ")");
8333 fprintf (file, "))\n");
8336 current_function_decl = fndecl;
8337 if (flags & TDF_GIMPLE)
8339 static bool hotness_bb_param_printed = false;
8340 if (profile_info != NULL
8341 && !hotness_bb_param_printed)
8343 hotness_bb_param_printed = true;
8344 fprintf (file,
8345 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8346 " */\n", get_hot_bb_threshold ());
8349 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8350 dump_flags | TDF_SLIM);
8351 fprintf (file, " __GIMPLE (%s",
8352 (fun->curr_properties & PROP_ssa) ? "ssa"
8353 : (fun->curr_properties & PROP_cfg) ? "cfg"
8354 : "");
8356 if (fun && fun->cfg)
8358 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8359 if (bb->count.initialized_p ())
8360 fprintf (file, ",%s(%" PRIu64 ")",
8361 profile_quality_as_string (bb->count.quality ()),
8362 bb->count.value ());
8363 if (dump_flags & TDF_UID)
8364 fprintf (file, ")\n%sD_%u (", function_name (fun),
8365 DECL_UID (fndecl));
8366 else
8367 fprintf (file, ")\n%s (", function_name (fun));
8370 else
8372 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8373 if (dump_flags & TDF_UID)
8374 fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
8375 tmclone ? "[tm-clone] " : "");
8376 else
8377 fprintf (file, " %s %s(", function_name (fun),
8378 tmclone ? "[tm-clone] " : "");
8381 arg = DECL_ARGUMENTS (fndecl);
8382 while (arg)
8384 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8385 fprintf (file, " ");
8386 print_generic_expr (file, arg, dump_flags);
8387 if (DECL_CHAIN (arg))
8388 fprintf (file, ", ");
8389 arg = DECL_CHAIN (arg);
8391 fprintf (file, ")\n");
8393 dsf = DECL_STRUCT_FUNCTION (fndecl);
8394 if (dsf && (flags & TDF_EH))
8395 dump_eh_tree (file, dsf);
8397 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8399 dump_node (fndecl, TDF_SLIM | flags, file);
8400 current_function_decl = old_current_fndecl;
8401 return;
8404 /* When GIMPLE is lowered, the variables are no longer available in
8405 BIND_EXPRs, so display them separately. */
8406 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8408 unsigned ix;
8409 ignore_topmost_bind = true;
8411 fprintf (file, "{\n");
8412 if (gimple_in_ssa_p (fun)
8413 && (flags & TDF_ALIAS))
8415 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8416 arg = DECL_CHAIN (arg))
8418 tree def = ssa_default_def (fun, arg);
8419 if (def)
8420 dump_default_def (file, def, 2, flags);
8423 tree res = DECL_RESULT (fun->decl);
8424 if (res != NULL_TREE
8425 && DECL_BY_REFERENCE (res))
8427 tree def = ssa_default_def (fun, res);
8428 if (def)
8429 dump_default_def (file, def, 2, flags);
8432 tree static_chain = fun->static_chain_decl;
8433 if (static_chain != NULL_TREE)
8435 tree def = ssa_default_def (fun, static_chain);
8436 if (def)
8437 dump_default_def (file, def, 2, flags);
8441 if (!vec_safe_is_empty (fun->local_decls))
8442 FOR_EACH_LOCAL_DECL (fun, ix, var)
8444 print_generic_decl (file, var, flags);
8445 fprintf (file, "\n");
8447 any_var = true;
8450 tree name;
8452 if (gimple_in_ssa_p (fun))
8453 FOR_EACH_SSA_NAME (ix, name, fun)
8455 if (!SSA_NAME_VAR (name)
8456 /* SSA names with decls without a name still get
8457 dumped as _N; list those explicitly as well even
8458 though we've dumped the decl declaration as D.xxx
8459 above. */
8460 || !SSA_NAME_IDENTIFIER (name))
8462 fprintf (file, " ");
8463 print_generic_expr (file, TREE_TYPE (name), flags);
8464 fprintf (file, " ");
8465 print_generic_expr (file, name, flags);
8466 fprintf (file, ";\n");
8468 any_var = true;
8473 if (fun && fun->decl == fndecl
8474 && fun->cfg
8475 && basic_block_info_for_fn (fun))
8477 /* If the CFG has been built, emit a CFG-based dump. */
8478 if (!ignore_topmost_bind)
8479 fprintf (file, "{\n");
8481 if (any_var && n_basic_blocks_for_fn (fun))
8482 fprintf (file, "\n");
8484 FOR_EACH_BB_FN (bb, fun)
8485 dump_bb (file, bb, 2, flags);
8487 fprintf (file, "}\n");
8489 else if (fun && (fun->curr_properties & PROP_gimple_any))
8491 /* The function is now in GIMPLE form but the CFG has not been
8492 built yet. Emit the single sequence of GIMPLE statements
8493 that make up its body. */
8494 gimple_seq body = gimple_body (fndecl);
8496 if (gimple_seq_first_stmt (body)
8497 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8498 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8499 print_gimple_seq (file, body, 0, flags);
8500 else
8502 if (!ignore_topmost_bind)
8503 fprintf (file, "{\n");
8505 if (any_var)
8506 fprintf (file, "\n");
8508 print_gimple_seq (file, body, 2, flags);
8509 fprintf (file, "}\n");
8512 else
8514 int indent;
8516 /* Make a tree based dump. */
8517 chain = DECL_SAVED_TREE (fndecl);
8518 if (chain && TREE_CODE (chain) == BIND_EXPR)
8520 if (ignore_topmost_bind)
8522 chain = BIND_EXPR_BODY (chain);
8523 indent = 2;
8525 else
8526 indent = 0;
8528 else
8530 if (!ignore_topmost_bind)
8532 fprintf (file, "{\n");
8533 /* No topmost bind, pretend it's ignored for later. */
8534 ignore_topmost_bind = true;
8536 indent = 2;
8539 if (any_var)
8540 fprintf (file, "\n");
8542 print_generic_stmt_indented (file, chain, flags, indent);
8543 if (ignore_topmost_bind)
8544 fprintf (file, "}\n");
8547 if (flags & TDF_ENUMERATE_LOCALS)
8548 dump_enumerated_decls (file, flags);
8549 fprintf (file, "\n\n");
8551 current_function_decl = old_current_fndecl;
8554 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8556 DEBUG_FUNCTION void
8557 debug_function (tree fn, dump_flags_t flags)
8559 dump_function_to_file (fn, stderr, flags);
8563 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8565 static void
8566 print_pred_bbs (FILE *file, basic_block bb)
8568 edge e;
8569 edge_iterator ei;
8571 FOR_EACH_EDGE (e, ei, bb->preds)
8572 fprintf (file, "bb_%d ", e->src->index);
8576 /* Print on FILE the indexes for the successors of basic_block BB. */
8578 static void
8579 print_succ_bbs (FILE *file, basic_block bb)
8581 edge e;
8582 edge_iterator ei;
8584 FOR_EACH_EDGE (e, ei, bb->succs)
8585 fprintf (file, "bb_%d ", e->dest->index);
8588 /* Print to FILE the basic block BB following the VERBOSITY level. */
8590 void
8591 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8593 char *s_indent = (char *) alloca ((size_t) indent + 1);
8594 memset ((void *) s_indent, ' ', (size_t) indent);
8595 s_indent[indent] = '\0';
8597 /* Print basic_block's header. */
8598 if (verbosity >= 2)
8600 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8601 print_pred_bbs (file, bb);
8602 fprintf (file, "}, succs = {");
8603 print_succ_bbs (file, bb);
8604 fprintf (file, "})\n");
8607 /* Print basic_block's body. */
8608 if (verbosity >= 3)
8610 fprintf (file, "%s {\n", s_indent);
8611 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8612 fprintf (file, "%s }\n", s_indent);
8616 /* Print loop information. */
8618 void
8619 print_loop_info (FILE *file, const class loop *loop, const char *prefix)
8621 if (loop->can_be_parallel)
8622 fprintf (file, ", can_be_parallel");
8623 if (loop->warned_aggressive_loop_optimizations)
8624 fprintf (file, ", warned_aggressive_loop_optimizations");
8625 if (loop->dont_vectorize)
8626 fprintf (file, ", dont_vectorize");
8627 if (loop->force_vectorize)
8628 fprintf (file, ", force_vectorize");
8629 if (loop->in_oacc_kernels_region)
8630 fprintf (file, ", in_oacc_kernels_region");
8631 if (loop->finite_p)
8632 fprintf (file, ", finite_p");
8633 if (loop->unroll)
8634 fprintf (file, "\n%sunroll %d", prefix, loop->unroll);
8635 if (loop->nb_iterations)
8637 fprintf (file, "\n%sniter ", prefix);
8638 print_generic_expr (file, loop->nb_iterations);
8641 if (loop->any_upper_bound)
8643 fprintf (file, "\n%supper_bound ", prefix);
8644 print_decu (loop->nb_iterations_upper_bound, file);
8646 if (loop->any_likely_upper_bound)
8648 fprintf (file, "\n%slikely_upper_bound ", prefix);
8649 print_decu (loop->nb_iterations_likely_upper_bound, file);
8652 if (loop->any_estimate)
8654 fprintf (file, "\n%sestimate ", prefix);
8655 print_decu (loop->nb_iterations_estimate, file);
8657 bool reliable;
8658 sreal iterations;
8659 if (loop->num && expected_loop_iterations_by_profile (loop, &iterations, &reliable))
8661 fprintf (file, "\n%siterations by profile: %f (%s%s) entry count:", prefix,
8662 iterations.to_double (), reliable ? "reliable" : "unreliable",
8663 maybe_flat_loop_profile (loop) ? ", maybe flat" : "");
8664 loop_count_in (loop).dump (file, cfun);
8669 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8671 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8672 VERBOSITY level this outputs the contents of the loop, or just its
8673 structure. */
8675 static void
8676 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8678 char *s_indent;
8679 basic_block bb;
8681 if (loop == NULL)
8682 return;
8684 s_indent = (char *) alloca ((size_t) indent + 1);
8685 memset ((void *) s_indent, ' ', (size_t) indent);
8686 s_indent[indent] = '\0';
8688 /* Print loop's header. */
8689 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8690 if (loop->header)
8691 fprintf (file, "header = %d", loop->header->index);
8692 else
8694 fprintf (file, "deleted)\n");
8695 return;
8697 if (loop->latch)
8698 fprintf (file, ", latch = %d", loop->latch->index);
8699 else
8700 fprintf (file, ", multiple latches");
8701 print_loop_info (file, loop, s_indent);
8702 fprintf (file, ")\n");
8704 /* Print loop's body. */
8705 if (verbosity >= 1)
8707 fprintf (file, "%s{\n", s_indent);
8708 FOR_EACH_BB_FN (bb, cfun)
8709 if (bb->loop_father == loop)
8710 print_loops_bb (file, bb, indent, verbosity);
8712 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8713 fprintf (file, "%s}\n", s_indent);
8717 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8718 spaces. Following VERBOSITY level this outputs the contents of the
8719 loop, or just its structure. */
8721 static void
8722 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8723 int verbosity)
8725 if (loop == NULL)
8726 return;
8728 print_loop (file, loop, indent, verbosity);
8729 print_loop_and_siblings (file, loop->next, indent, verbosity);
8732 /* Follow a CFG edge from the entry point of the program, and on entry
8733 of a loop, pretty print the loop structure on FILE. */
8735 void
8736 print_loops (FILE *file, int verbosity)
8738 basic_block bb;
8740 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8741 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8742 if (bb && bb->loop_father)
8743 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8746 /* Dump a loop. */
8748 DEBUG_FUNCTION void
8749 debug (class loop &ref)
8751 print_loop (stderr, &ref, 0, /*verbosity*/0);
8754 DEBUG_FUNCTION void
8755 debug (class loop *ptr)
8757 if (ptr)
8758 debug (*ptr);
8759 else
8760 fprintf (stderr, "<nil>\n");
8763 /* Dump a loop verbosely. */
8765 DEBUG_FUNCTION void
8766 debug_verbose (class loop &ref)
8768 print_loop (stderr, &ref, 0, /*verbosity*/3);
8771 DEBUG_FUNCTION void
8772 debug_verbose (class loop *ptr)
8774 if (ptr)
9775 debug_verbose (*ptr);
8776 else
8777 fprintf (stderr, "<nil>\n");
8781 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8783 DEBUG_FUNCTION void
8784 debug_loops (int verbosity)
8786 print_loops (stderr, verbosity);
8789 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8791 DEBUG_FUNCTION void
8792 debug_loop (class loop *loop, int verbosity)
8794 print_loop (stderr, loop, 0, verbosity);
8797 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8798 level. */
8800 DEBUG_FUNCTION void
8801 debug_loop_num (unsigned num, int verbosity)
8803 debug_loop (get_loop (cfun, num), verbosity);
8806 /* Return true if BB ends with a call, possibly followed by some
8807 instructions that must stay with the call. Return false
8808 otherwise. */
8810 static bool
8811 gimple_block_ends_with_call_p (basic_block bb)
8813 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8814 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8818 /* Return true if BB ends with a conditional branch. Return false
8819 otherwise. */
8821 static bool
8822 gimple_block_ends_with_condjump_p (const_basic_block bb)
8824 return safe_is_a <gcond *> (*gsi_last_bb (const_cast <basic_block> (bb)));
8828 /* Return true if statement T may terminate execution of BB in ways not
8829 explicitly represented in the CFG. */
8831 bool
8832 stmt_can_terminate_bb_p (gimple *t)
8834 tree fndecl = NULL_TREE;
8835 int call_flags = 0;
8837 /* An EH exception not handled internally terminates execution of the whole
8838 function. */
8839 if (stmt_can_throw_external (cfun, t))
8840 return true;
8842 /* NORETURN and LONGJMP calls already have an edge to exit.
8843 CONST and PURE calls do not need one.
8844 We don't currently check for CONST and PURE here, although
8845 it would be a good idea, because those attributes are
8846 figured out from the RTL in mark_constant_function, and
8847 the counter incrementation code from -fprofile-arcs
8848 leads to different results from -fbranch-probabilities. */
8849 if (is_gimple_call (t))
8851 fndecl = gimple_call_fndecl (t);
8852 call_flags = gimple_call_flags (t);
8855 if (is_gimple_call (t)
8856 && fndecl
8857 && fndecl_built_in_p (fndecl)
8858 && (call_flags & ECF_NOTHROW)
8859 && !(call_flags & ECF_RETURNS_TWICE)
8860 /* fork() doesn't really return twice, but the effect of
8861 wrapping it in __gcov_fork() which calls __gcov_dump() and
8862 __gcov_reset() and clears the counters before forking has the same
8863 effect as returning twice. Force a fake edge. */
8864 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8865 return false;
8867 if (is_gimple_call (t))
8869 edge_iterator ei;
8870 edge e;
8871 basic_block bb;
8873 if (call_flags & (ECF_PURE | ECF_CONST)
8874 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8875 return false;
8877 /* A function call may do a longjmp, terminate the program or do other things.
8878 Special case noreturn calls that have non-abnormal edges out, as in this case
8879 the fact is sufficiently represented by lack of edges out of T. */
8880 if (!(call_flags & ECF_NORETURN))
8881 return true;
8883 bb = gimple_bb (t);
8884 FOR_EACH_EDGE (e, ei, bb->succs)
8885 if ((e->flags & EDGE_FAKE) == 0)
8886 return true;
8889 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8890 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_basic_p (asm_stmt))
8891 return true;
8893 return false;
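/* For example (a sketch; maybe_longjmp is a hypothetical function that
   may call longjmp):

     void f (void)
     {
       setup ();
       maybe_longjmp ();
       cleanup ();
     }

   the call can end the basic block in a way the CFG does not show:
   entering the block does not guarantee that cleanup () runs.
   gimple_flow_call_edges_add below uses this predicate to decide where
   fake edges to EXIT are needed.  */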
8897 /* Add fake edges to the function exit for any non-constant and
8898 non-noreturn calls (or noreturn calls with EH/abnormal edges),
8899 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8900 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8901 that were split.
8903 The goal is to expose cases in which entering a basic block does
8904 not imply that all subsequent instructions must be executed. */
8906 static int
8907 gimple_flow_call_edges_add (sbitmap blocks)
8909 int i;
8910 int blocks_split = 0;
8911 int last_bb = last_basic_block_for_fn (cfun);
8912 bool check_last_block = false;
8914 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8915 return 0;
8917 if (! blocks)
8918 check_last_block = true;
8919 else
8920 check_last_block = bitmap_bit_p (blocks,
8921 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8923 /* In the last basic block, before epilogue generation, there will be
8924 a fallthru edge to EXIT. Special care is required if the last insn
8925 of the last basic block is a call because make_edge folds duplicate
8926 edges, which would result in the fallthru edge also being marked
8927 fake, which would result in the fallthru edge being removed by
8928 remove_fake_edges, which would result in an invalid CFG.
8930 Moreover, we can't elide the outgoing fake edge, since the block
8931 profiler needs to take this into account in order to solve the minimal
8932 spanning tree in the case that the call doesn't return.
8934 Handle this by adding a dummy instruction in a new last basic block. */
8935 if (check_last_block)
8937 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8938 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8939 gimple *t = NULL;
8941 if (!gsi_end_p (gsi))
8942 t = gsi_stmt (gsi);
8944 if (t && stmt_can_terminate_bb_p (t))
8946 edge e;
8948 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8949 if (e)
8951 gsi_insert_on_edge (e, gimple_build_nop ());
8952 gsi_commit_edge_inserts ();
8957 /* Now add fake edges to the function exit for any non-constant
8958 calls since there is no way that we can determine if they will
8959 return or not... */
8960 for (i = 0; i < last_bb; i++)
8962 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8963 gimple_stmt_iterator gsi;
8964 gimple *stmt, *last_stmt;
8966 if (!bb)
8967 continue;
8969 if (blocks && !bitmap_bit_p (blocks, i))
8970 continue;
8972 gsi = gsi_last_nondebug_bb (bb);
8973 if (!gsi_end_p (gsi))
8975 last_stmt = gsi_stmt (gsi);
8978 stmt = gsi_stmt (gsi);
8979 if (stmt_can_terminate_bb_p (stmt))
8981 edge e;
8983 /* The handling above of the final block before the
8984 epilogue should be enough to verify that there is
8985 no edge to the exit block in CFG already.
8986 Calling make_edge in such case would cause us to
8987 mark that edge as fake and remove it later. */
8988 if (flag_checking && stmt == last_stmt)
8990 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8991 gcc_assert (e == NULL);
8994 /* Note that the following may create a new basic block
8995 and renumber the existing basic blocks. */
8996 if (stmt != last_stmt)
8998 e = split_block (bb, stmt);
8999 if (e)
9000 blocks_split++;
9002 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
9003 e->probability = profile_probability::guessed_never ();
9005 gsi_prev (&gsi);
9007 while (!gsi_end_p (gsi));
9011 if (blocks_split)
9012 checking_verify_flow_info ();
9014 return blocks_split;
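/* Illustration (may_exit is a hypothetical function that may call
   exit): in

     int g (void)
     {
       may_exit ();
       return 1;
     }

   a fake edge from the call's block to EXIT records that the fallthru
   to the return may never execute, keeping the spanning-tree based
   edge profiling of -fprofile-arcs consistent.  */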
9017 /* Removes edge E and all the blocks dominated by it, and updates dominance
9018 information. The IL in E->src needs to be updated separately.
9019 If dominance info is not available, only the edge E is removed. */
9021 void
9022 remove_edge_and_dominated_blocks (edge e)
9024 vec<basic_block> bbs_to_fix_dom = vNULL;
9025 edge f;
9026 edge_iterator ei;
9027 bool none_removed = false;
9028 unsigned i;
9029 basic_block bb, dbb;
9030 bitmap_iterator bi;
9032 /* If we are removing a path inside a non-root loop, that may change
9033 loop ownership of blocks or remove loops; mark loops for fixup. */
9034 class loop *src_loop = e->src->loop_father;
9035 if (current_loops
9036 && loop_outer (src_loop) != NULL
9037 && src_loop == e->dest->loop_father)
9039 loops_state_set (LOOPS_NEED_FIXUP);
9040 /* If we are removing a backedge clear the number of iterations
9041 and estimates. */
9042 class loop *dest_loop = e->dest->loop_father;
9043 if (e->dest == src_loop->header
9044 || (e->dest == dest_loop->header
9045 && flow_loop_nested_p (dest_loop, src_loop)))
9047 free_numbers_of_iterations_estimates (dest_loop);
9048 /* If we removed the last backedge mark the loop for removal. */
9049 FOR_EACH_EDGE (f, ei, dest_loop->header->preds)
9050 if (f != e
9051 && (f->src->loop_father == dest_loop
9052 || flow_loop_nested_p (dest_loop, f->src->loop_father)))
9053 break;
9054 if (!f)
9055 mark_loop_for_removal (dest_loop);
9059 if (!dom_info_available_p (CDI_DOMINATORS))
9061 remove_edge (e);
9062 return;
9065 /* No updating is needed for edges to exit. */
9066 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9068 if (cfgcleanup_altered_bbs)
9069 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
9070 remove_edge (e);
9071 return;
9074 /* First, we find the basic blocks to remove. If E->dest has a predecessor
9075 that is not dominated by E->dest, then this set is empty. Otherwise,
9076 all the basic blocks dominated by E->dest are removed.
9078 Also, to DF_IDOM we store the immediate dominators of the blocks in
9079 the dominance frontier of E (i.e., of the successors of the
9080 removed blocks, if there are any, and of E->dest otherwise). */
9081 FOR_EACH_EDGE (f, ei, e->dest->preds)
9083 if (f == e)
9084 continue;
9086 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
9088 none_removed = true;
9089 break;
9093 auto_bitmap df, df_idom;
9094 auto_vec<basic_block> bbs_to_remove;
9095 if (none_removed)
9096 bitmap_set_bit (df_idom,
9097 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
9098 else
9100 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
9101 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
9103 FOR_EACH_EDGE (f, ei, bb->succs)
9105 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
9106 bitmap_set_bit (df, f->dest->index);
9109 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
9110 bitmap_clear_bit (df, bb->index);
9112 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
9114 bb = BASIC_BLOCK_FOR_FN (cfun, i);
9115 bitmap_set_bit (df_idom,
9116 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
9120 if (cfgcleanup_altered_bbs)
9122 /* Record the set of the altered basic blocks. */
9123 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
9124 bitmap_ior_into (cfgcleanup_altered_bbs, df);
9127 /* Remove E and the cancelled blocks. */
9128 if (none_removed)
9129 remove_edge (e);
9130 else
9132 /* Walk backwards so as to get a chance to substitute all
9133 released DEFs into debug stmts. See
9134 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
9135 details. */
9136 for (i = bbs_to_remove.length (); i-- > 0; )
9137 delete_basic_block (bbs_to_remove[i]);
9140 /* Update the dominance information. The immediate dominator may change only
9141 for blocks whose immediate dominator belongs to DF_IDOM:
9143 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
9144 removal. Let Z be an arbitrary block such that idom(Z) = Y and
9145 Z dominates X after the removal. Before removal, there exists a path P
9146 from Y to X that avoids Z. Let F be the last edge on P that is
9147 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
9148 dominates W, and because of P, Z does not dominate W), and W belongs to
9149 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
9150 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
9152 bb = BASIC_BLOCK_FOR_FN (cfun, i);
9153 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
9154 dbb;
9155 dbb = next_dom_son (CDI_DOMINATORS, dbb))
9156 bbs_to_fix_dom.safe_push (dbb);
9159 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
9161 bbs_to_fix_dom.release ();
9164 /* Purge dead EH edges from basic block BB. */
9166 bool
9167 gimple_purge_dead_eh_edges (basic_block bb)
9169 bool changed = false;
9170 edge e;
9171 edge_iterator ei;
9172 gimple *stmt = *gsi_last_bb (bb);
9174 if (stmt && stmt_can_throw_internal (cfun, stmt))
9175 return false;
9177 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9179 if (e->flags & EDGE_EH)
9181 remove_edge_and_dominated_blocks (e);
9182 changed = true;
9184 else
9185 ei_next (&ei);
9188 return changed;
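/* A typical trigger: IPA analysis or inlining proves the statement at
   the end of BB nothrow, so stmt_can_throw_internal is false and the
   now-dead EH edge, together with any landing-pad blocks dominated
   only by it, is removed.  */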
9191 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
9193 bool
9194 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
9196 bool changed = false;
9197 unsigned i;
9198 bitmap_iterator bi;
9200 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9202 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9204 /* Earlier gimple_purge_dead_eh_edges could have removed
9205 this basic block already. */
9206 gcc_assert (bb || changed);
9207 if (bb != NULL)
9208 changed |= gimple_purge_dead_eh_edges (bb);
9211 return changed;
9214 /* Purge dead abnormal call edges from basic block BB. */
9216 bool
9217 gimple_purge_dead_abnormal_call_edges (basic_block bb)
9219 bool changed = false;
9220 edge e;
9221 edge_iterator ei;
9222 gimple *stmt = *gsi_last_bb (bb);
9224 if (stmt && stmt_can_make_abnormal_goto (stmt))
9225 return false;
9227 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9229 if (e->flags & EDGE_ABNORMAL)
9231 if (e->flags & EDGE_FALLTHRU)
9232 e->flags &= ~EDGE_ABNORMAL;
9233 else
9234 remove_edge_and_dominated_blocks (e);
9235 changed = true;
9237 else
9238 ei_next (&ei);
9241 return changed;
9244 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
9246 bool
9247 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
9249 bool changed = false;
9250 unsigned i;
9251 bitmap_iterator bi;
9253 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9255 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9257 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
9258 this basic block already. */
9259 gcc_assert (bb || changed);
9260 if (bb != NULL)
9261 changed |= gimple_purge_dead_abnormal_call_edges (bb);
9264 return changed;
9267 /* This function is called whenever a new edge is created or
9268 redirected. */
9270 static void
9271 gimple_execute_on_growing_pred (edge e)
9273 basic_block bb = e->dest;
9275 if (!gimple_seq_empty_p (phi_nodes (bb)))
9276 reserve_phi_args_for_new_edge (bb);
9279 /* This function is called immediately before edge E is removed from
9280 the edge vector E->dest->preds. */
9282 static void
9283 gimple_execute_on_shrinking_pred (edge e)
9285 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9286 remove_phi_args (e);
9289 /*---------------------------------------------------------------------------
9290 Helper functions for Loop versioning
9291 ---------------------------------------------------------------------------*/
9293 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9294 of 'first'. Both of them are dominated by 'new_head' basic block. When
9295 'new_head' was created by 'second's incoming edge it received phi arguments
9296 on the edge by split_edge(). Later, additional edge 'e' was created to
9297 connect 'new_head' and 'first'. Now this routine adds phi args on this
9298 additional edge 'e' matching the phi args the 'new_head' to 'second'
9299 edge received as part of the edge splitting. */
9301 static void
9302 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9303 basic_block new_head, edge e)
9305 gphi *phi1, *phi2;
9306 gphi_iterator psi1, psi2;
9307 tree def;
9308 edge e2 = find_edge (new_head, second);
9310 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9311 edge, we should always have an edge from NEW_HEAD to SECOND. */
9312 gcc_assert (e2 != NULL);
9314 /* Browse all 'second' basic block phi nodes and add phi args to
9315 edge 'e' for 'first' head. PHI args are always in correct order. */
9317 for (psi2 = gsi_start_phis (second),
9318 psi1 = gsi_start_phis (first);
9319 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9320 gsi_next (&psi2), gsi_next (&psi1))
9322 phi1 = psi1.phi ();
9323 phi2 = psi2.phi ();
9324 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9325 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9330 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
9331 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
9332 the destination of the ELSE part. */
9334 static void
9335 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9336 basic_block second_head ATTRIBUTE_UNUSED,
9337 basic_block cond_bb, void *cond_e)
9339 gimple_stmt_iterator gsi;
9340 gimple *new_cond_expr;
9341 tree cond_expr = (tree) cond_e;
9342 edge e0;
9344 /* Build new conditional expr */
9345 gsi = gsi_last_bb (cond_bb);
9347 cond_expr = force_gimple_operand_gsi_1 (&gsi, cond_expr,
9348 is_gimple_condexpr_for_cond,
9349 NULL_TREE, false,
9350 GSI_CONTINUE_LINKING);
9351 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9352 NULL_TREE, NULL_TREE);
9354 /* Add new cond in cond_bb. */
9355 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9357 /* Adjust edges appropriately to connect new head with first head
9358 as well as second head. */
9359 e0 = single_succ_edge (cond_bb);
9360 e0->flags &= ~EDGE_FALLTHRU;
9361 e0->flags |= EDGE_FALSE_VALUE;
9365 /* Do book-keeping of basic block BB for the profile consistency checker.
9366 Store the counts in RECORD. */
9367 static void
9368 gimple_account_profile_record (basic_block bb,
9369 struct profile_record *record)
9371 gimple_stmt_iterator i;
9372 for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
9373 gsi_next_nondebug (&i))
9375 record->size
9376 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9377 if (profile_info)
9379 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
9380 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
9381 && bb->count.ipa ().initialized_p ())
9382 record->time
9383 += estimate_num_insns (gsi_stmt (i),
9384 &eni_time_weights)
9385 * bb->count.ipa ().to_gcov_type ();
9387 else if (bb->count.initialized_p ()
9388 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
9389 record->time
9390 += estimate_num_insns
9391 (gsi_stmt (i),
9392 &eni_time_weights)
9393 * bb->count.to_sreal_scale
9394 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
9395 else
9396 record->time
9397 += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
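/* In other words: SIZE is an unweighted sum of per-statement size
   estimates, while TIME weights each statement's estimate by the
   block's execution count (the IPA count when profile feedback is
   available, otherwise the count scaled relative to the entry block),
   falling back to an unweighted sum when no counts are initialized.  */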
9401 struct cfg_hooks gimple_cfg_hooks = {
9402 "gimple",
9403 gimple_verify_flow_info,
9404 gimple_dump_bb, /* dump_bb */
9405 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9406 create_bb, /* create_basic_block */
9407 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9408 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9409 gimple_can_remove_branch_p, /* can_remove_branch_p */
9410 remove_bb, /* delete_basic_block */
9411 gimple_split_block, /* split_block */
9412 gimple_move_block_after, /* move_block_after */
9413 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9414 gimple_merge_blocks, /* merge_blocks */
9415 gimple_predict_edge, /* predict_edge */
9416 gimple_predicted_by_p, /* predicted_by_p */
9417 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9418 gimple_duplicate_bb, /* duplicate_block */
9419 gimple_split_edge, /* split_edge */
9420 gimple_make_forwarder_block, /* make_forward_block */
9421 NULL, /* tidy_fallthru_edge */
9422 NULL, /* force_nonfallthru */
9423 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9424 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9425 gimple_flow_call_edges_add, /* flow_call_edges_add */
9426 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9427 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9428 gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9429 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9430 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9431 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9432 flush_pending_stmts, /* flush_pending_stmts */
9433 gimple_empty_block_p, /* block_empty_p */
9434 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9435 gimple_account_profile_record,
9439 /* Split all critical edges. Split some extra (not necessarily critical) edges
9440 if FOR_EDGE_INSERTION_P is true. */
9442 unsigned int
9443 split_critical_edges (bool for_edge_insertion_p /* = false */)
9445 basic_block bb;
9446 edge e;
9447 edge_iterator ei;
9449 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9450 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9451 mappings around the calls to split_edge. */
9452 start_recording_case_labels ();
9453 FOR_ALL_BB_FN (bb, cfun)
9455 FOR_EACH_EDGE (e, ei, bb->succs)
9457 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9458 split_edge (e);
9459 /* PRE inserts statements to edges and expects that
9460 since split_critical_edges was done beforehand, committing edge
9461 insertions will not split more edges. In addition to critical
9462 edges we must split edges that have multiple successors and
9463 end by control flow statements, such as RESX.
9464 Go ahead and split them too. This matches the logic in
9465 gimple_find_edge_insert_loc. */
9466 else if (for_edge_insertion_p
9467 && (!single_pred_p (e->dest)
9468 || !gimple_seq_empty_p (phi_nodes (e->dest))
9469 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9470 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9471 && !(e->flags & EDGE_ABNORMAL))
9473 gimple_stmt_iterator gsi;
9475 gsi = gsi_last_bb (e->src);
9476 if (!gsi_end_p (gsi)
9477 && stmt_ends_bb_p (gsi_stmt (gsi))
9478 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9479 && !gimple_call_builtin_p (gsi_stmt (gsi),
9480 BUILT_IN_RETURN)))
9481 split_edge (e);
9485 end_recording_case_labels ();
9486 return 0;
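/* Reminder: an edge is critical when its source has more than one
   successor and its destination has more than one predecessor (see
   EDGE_CRITICAL_P).  Statements cannot be inserted on such an edge
   without affecting the other paths through its endpoints, so
   split_edge gives the edge a new empty block of its own where
   insertions can land safely.  */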
9489 namespace {
9491 const pass_data pass_data_split_crit_edges =
9493 GIMPLE_PASS, /* type */
9494 "crited", /* name */
9495 OPTGROUP_NONE, /* optinfo_flags */
9496 TV_TREE_SPLIT_EDGES, /* tv_id */
9497 PROP_cfg, /* properties_required */
9498 PROP_no_crit_edges, /* properties_provided */
9499 0, /* properties_destroyed */
9500 0, /* todo_flags_start */
9501 0, /* todo_flags_finish */
9504 class pass_split_crit_edges : public gimple_opt_pass
9506 public:
9507 pass_split_crit_edges (gcc::context *ctxt)
9508 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9511 /* opt_pass methods: */
9512 unsigned int execute (function *) final override
9514 return split_critical_edges ();
9517 opt_pass * clone () final override
9519 return new pass_split_crit_edges (m_ctxt);
9521 }; // class pass_split_crit_edges
9523 } // anon namespace
9525 gimple_opt_pass *
9526 make_pass_split_crit_edges (gcc::context *ctxt)
9528 return new pass_split_crit_edges (ctxt);
9532 /* Insert COND expression which is GIMPLE_COND after STMT
9533 in basic block BB with appropriate basic block split
9534 and creation of a new conditionally executed basic block.
9535 Update profile so the new bb is visited with probability PROB.
9536 Return created basic block. */
9537 basic_block
9538 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9539 profile_probability prob)
9541 edge fall = split_block (bb, stmt);
9542 gimple_stmt_iterator iter = gsi_last_bb (bb);
9543 basic_block new_bb;
9545 /* Insert cond statement. */
9546 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9547 if (gsi_end_p (iter))
9548 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9549 else
9550 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9552 /* Create conditionally executed block. */
9553 new_bb = create_empty_bb (bb);
9554 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9555 e->probability = prob;
9556 new_bb->count = e->count ();
9557 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9559 /* Fix edge for split bb. */
9560 fall->flags = EDGE_FALSE_VALUE;
9561 fall->probability -= e->probability;
9563 /* Update dominance info. */
9564 if (dom_info_available_p (CDI_DOMINATORS))
9566 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9567 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9570 /* Update loop info. */
9571 if (current_loops)
9572 add_bb_to_loop (new_bb, bb->loop_father);
9574 return new_bb;
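/* A possible use (a sketch; BB, STMT and VAL stand for values the
   caller already has):

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond,
                         profile_probability::unlikely ());

   Statements added to THEN_BB afterwards execute only when VAL != 0,
   and the profile records the new block as rarely visited.  */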
9579 /* Given a basic block B which ends with a conditional and has
9580 precisely two successors, determine which of the edges is taken if
9581 the conditional is true and which is taken if the conditional is
9582 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9584 void
9585 extract_true_false_edges_from_block (basic_block b,
9586 edge *true_edge,
9587 edge *false_edge)
9589 edge e = EDGE_SUCC (b, 0);
9591 if (e->flags & EDGE_TRUE_VALUE)
9593 *true_edge = e;
9594 *false_edge = EDGE_SUCC (b, 1);
9596 else
9598 *false_edge = e;
9599 *true_edge = EDGE_SUCC (b, 1);
9604 /* From a controlling predicate in the immediate dominator DOM of
9605 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9606 predicate evaluates to true and false and store them to
9607 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9608 they are non-NULL. Returns true if the edges can be determined,
9609 else return false. */
9611 bool
9612 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9613 edge *true_controlled_edge,
9614 edge *false_controlled_edge)
9616 basic_block bb = phiblock;
9617 edge true_edge, false_edge, tem;
9618 edge e0 = NULL, e1 = NULL;
9620 /* We have to verify that one edge into the PHI node is dominated
9621 by the true edge of the predicate block and the other edge
9622 dominated by the false edge. This ensures that the PHI argument
9623 we are going to take is completely determined by the path we
9624 take from the predicate block.
9625 We can only use BB dominance checks below if the destination of
9626 the true/false edges are dominated by their edge, thus only
9627 have a single predecessor. */
9628 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9629 tem = EDGE_PRED (bb, 0);
9630 if (tem == true_edge
9631 || (single_pred_p (true_edge->dest)
9632 && (tem->src == true_edge->dest
9633 || dominated_by_p (CDI_DOMINATORS,
9634 tem->src, true_edge->dest))))
9635 e0 = tem;
9636 else if (tem == false_edge
9637 || (single_pred_p (false_edge->dest)
9638 && (tem->src == false_edge->dest
9639 || dominated_by_p (CDI_DOMINATORS,
9640 tem->src, false_edge->dest))))
9641 e1 = tem;
9642 else
9643 return false;
9644 tem = EDGE_PRED (bb, 1);
9645 if (tem == true_edge
9646 || (single_pred_p (true_edge->dest)
9647 && (tem->src == true_edge->dest
9648 || dominated_by_p (CDI_DOMINATORS,
9649 tem->src, true_edge->dest))))
9650 e0 = tem;
9651 else if (tem == false_edge
9652 || (single_pred_p (false_edge->dest)
9653 && (tem->src == false_edge->dest
9654 || dominated_by_p (CDI_DOMINATORS,
9655 tem->src, false_edge->dest))))
9656 e1 = tem;
9657 else
9658 return false;
9659 if (!e0 || !e1)
9660 return false;
9662 if (true_controlled_edge)
9663 *true_controlled_edge = e0;
9664 if (false_controlled_edge)
9665 *false_controlled_edge = e1;
9667 return true;
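/* For example, if DOM ends in

     if (x_1 > 0) goto bb3; else goto bb4;

   and PHIBLOCK bb5 contains x_2 = PHI <a_3(bb3), b_4(bb4)>, where bb3
   and bb4 each have DOM as their single predecessor, then bb3->bb5 is
   the true-controlled edge and bb4->bb5 the false-controlled one: the
   PHI argument a_3 is chosen exactly when the predicate is true.  */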
9670 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9671 range [low, high]. Place associated stmts before the last stmt of BB. */
9673 void
9674 generate_range_test (basic_block bb, tree index, tree low, tree high,
9675 tree *lhs, tree *rhs)
9677 tree type = TREE_TYPE (index);
9678 tree utype = range_check_type (type);
9680 low = fold_convert (utype, low);
9681 high = fold_convert (utype, high);
9683 gimple_seq seq = NULL;
9684 index = gimple_convert (&seq, utype, index);
9685 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9686 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9688 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9689 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
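/* The generated test relies on the usual unsigned-wraparound trick:
   in the unsigned type UTYPE,

     LOW <= INDEX && INDEX <= HIGH

   is equivalent to the single comparison

     (UTYPE) (INDEX - LOW) <= (UTYPE) (HIGH - LOW)

   because an INDEX below LOW wraps around to a huge value.  E.g. for
   the range [3, 7] this emits *LHS = INDEX - 3 and *RHS = 4, and the
   caller compares *LHS against *RHS.  */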
9692 /* Return the basic block that belongs to label numbered INDEX
9693 of a switch statement. */
9695 basic_block
9696 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9698 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9701 /* Return the default basic block of a switch statement. */
9703 basic_block
9704 gimple_switch_default_bb (function *ifun, gswitch *gs)
9706 return gimple_switch_label_bb (ifun, gs, 0);
9709 /* Return the edge that belongs to label numbered INDEX
9710 of a switch statement. */
9712 edge
9713 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9715 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9718 /* Return the default edge of a switch statement. */
9720 edge
9721 gimple_switch_default_edge (function *ifun, gswitch *gs)
9723 return gimple_switch_edge (ifun, gs, 0);
9726 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9728 bool
9729 cond_only_block_p (basic_block bb)
9731 /* BB must have no executable statements. */
9732 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9733 if (phi_nodes (bb))
9734 return false;
9735 while (!gsi_end_p (gsi))
9737 gimple *stmt = gsi_stmt (gsi);
9738 if (is_gimple_debug (stmt))
9740 else if (gimple_code (stmt) == GIMPLE_NOP
9741 || gimple_code (stmt) == GIMPLE_PREDICT
9742 || gimple_code (stmt) == GIMPLE_COND)
9744 else
9745 return false;
9746 gsi_next (&gsi);
9748 return true;
9752 /* Emit return warnings. */
9754 namespace {
9756 const pass_data pass_data_warn_function_return =
9758 GIMPLE_PASS, /* type */
9759 "*warn_function_return", /* name */
9760 OPTGROUP_NONE, /* optinfo_flags */
9761 TV_NONE, /* tv_id */
9762 PROP_cfg, /* properties_required */
9763 0, /* properties_provided */
9764 0, /* properties_destroyed */
9765 0, /* todo_flags_start */
9766 0, /* todo_flags_finish */
9769 class pass_warn_function_return : public gimple_opt_pass
9771 public:
9772 pass_warn_function_return (gcc::context *ctxt)
9773 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9776 /* opt_pass methods: */
9777 unsigned int execute (function *) final override;
9779 }; // class pass_warn_function_return
9781 unsigned int
9782 pass_warn_function_return::execute (function *fun)
9784 location_t location;
9785 gimple *last;
9786 edge e;
9787 edge_iterator ei;
9789 if (!targetm.warn_func_return (fun->decl))
9790 return 0;
9792 /* If we have a path to EXIT, then we do return. */
9793 if (TREE_THIS_VOLATILE (fun->decl)
9794 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9796 location = UNKNOWN_LOCATION;
9797 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9798 (e = ei_safe_edge (ei)); )
9800 last = *gsi_last_bb (e->src);
9801 if ((gimple_code (last) == GIMPLE_RETURN
9802 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9803 && location == UNKNOWN_LOCATION
9804 && ((location = LOCATION_LOCUS (gimple_location (last)))
9805 != UNKNOWN_LOCATION)
9806 && !optimize)
9807 break;
9808 /* When optimizing, replace return stmts in noreturn functions
9809 with a __builtin_unreachable () call. */
9810 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9812 location_t loc = gimple_location (last);
9813 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
9814 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9815 gsi_replace (&gsi, new_stmt, true);
9816 remove_edge (e);
9818 else
9819 ei_next (&ei);
9821 if (location == UNKNOWN_LOCATION)
9822 location = cfun->function_end_locus;
9823 warning_at (location, 0, "%<noreturn%> function does return");
9826 /* If we see "return;" in some basic block, then we do reach the end
9827 without returning a value. */
9828 else if (warn_return_type > 0
9829 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9830 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9832 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9834 greturn *return_stmt = dyn_cast <greturn *> (*gsi_last_bb (e->src));
9835 if (return_stmt
9836 && gimple_return_retval (return_stmt) == NULL
9837 && !warning_suppressed_p (return_stmt, OPT_Wreturn_type))
9839 location = gimple_location (return_stmt);
9840 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9841 location = fun->function_end_locus;
9842 if (warning_at (location, OPT_Wreturn_type,
9843 "control reaches end of non-void function"))
9844 suppress_warning (fun->decl, OPT_Wreturn_type);
9845 break;
9848 /* The C++ FE turns fallthrough from the end of a non-void function
9849 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9850 Recognize those as well as calls from ubsan_instrument_return. */
9851 basic_block bb;
9852 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9853 FOR_EACH_BB_FN (bb, fun)
9854 if (EDGE_COUNT (bb->succs) == 0)
9856 gimple *last = *gsi_last_bb (bb);
9857 const enum built_in_function ubsan_missing_ret
9858 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9859 if (last
9860 && ((LOCATION_LOCUS (gimple_location (last))
9861 == BUILTINS_LOCATION
9862 && (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
9863 || gimple_call_builtin_p (last,
9864 BUILT_IN_UNREACHABLE_TRAP)
9865 || gimple_call_builtin_p (last, BUILT_IN_TRAP)))
9866 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9868 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9869 gsi_prev_nondebug (&gsi);
9870 gimple *prev = gsi_stmt (gsi);
9871 if (prev == NULL)
9872 location = UNKNOWN_LOCATION;
9873 else
9874 location = gimple_location (prev);
9875 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9876 location = fun->function_end_locus;
9877 if (warning_at (location, OPT_Wreturn_type,
9878 "control reaches end of non-void function"))
9879 suppress_warning (fun->decl, OPT_Wreturn_type);
9880 break;
9884 return 0;
9887 } // anon namespace
9889 gimple_opt_pass *
9890 make_pass_warn_function_return (gcc::context *ctxt)
9892 return new pass_warn_function_return (ctxt);
9895 /* Walk a gimplified function and warn for functions whose return value is
9896 ignored and attribute((warn_unused_result)) is set. This is done before
9897 inlining, so we don't have to worry about that. */
9899 static void
9900 do_warn_unused_result (gimple_seq seq)
9902 tree fdecl, ftype;
9903 gimple_stmt_iterator i;
9905 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9907 gimple *g = gsi_stmt (i);
9909 switch (gimple_code (g))
9911 case GIMPLE_BIND:
9912 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9913 break;
9914 case GIMPLE_TRY:
9915 do_warn_unused_result (gimple_try_eval (g));
9916 do_warn_unused_result (gimple_try_cleanup (g));
9917 break;
9918 case GIMPLE_CATCH:
9919 do_warn_unused_result (gimple_catch_handler (
9920 as_a <gcatch *> (g)));
9921 break;
9922 case GIMPLE_EH_FILTER:
9923 do_warn_unused_result (gimple_eh_filter_failure (g));
9924 break;
9926 case GIMPLE_CALL:
9927 if (gimple_call_lhs (g))
9928 break;
9929 if (gimple_call_internal_p (g))
9930 break;
9932 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9933 LHS. All calls whose value is ignored should be
9934 represented like this. Look for the attribute. */
9935 fdecl = gimple_call_fndecl (g);
9936 ftype = gimple_call_fntype (g);
9938 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9940 location_t loc = gimple_location (g);
9942 if (fdecl)
9943 warning_at (loc, OPT_Wunused_result,
9944 "ignoring return value of %qD "
9945 "declared with attribute %<warn_unused_result%>",
9946 fdecl);
9947 else
9948 warning_at (loc, OPT_Wunused_result,
9949 "ignoring return value of function "
9950 "declared with attribute %<warn_unused_result%>");
9952 break;
9954 default:
9955 /* Not a container, not a call, or a call whose value is used. */
9956 break;
9961 namespace {
9963 const pass_data pass_data_warn_unused_result =
9965 GIMPLE_PASS, /* type */
9966 "*warn_unused_result", /* name */
9967 OPTGROUP_NONE, /* optinfo_flags */
9968 TV_NONE, /* tv_id */
9969 PROP_gimple_any, /* properties_required */
9970 0, /* properties_provided */
9971 0, /* properties_destroyed */
9972 0, /* todo_flags_start */
9973 0, /* todo_flags_finish */
9976 class pass_warn_unused_result : public gimple_opt_pass
9978 public:
9979 pass_warn_unused_result (gcc::context *ctxt)
9980 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9983 /* opt_pass methods: */
9984 bool gate (function *) final override { return flag_warn_unused_result; }
9985 unsigned int execute (function *) final override
9987 do_warn_unused_result (gimple_body (current_function_decl));
9988 return 0;
9991 }; // class pass_warn_unused_result
9993 } // anon namespace
9995 gimple_opt_pass *
9996 make_pass_warn_unused_result (gcc::context *ctxt)
9998 return new pass_warn_unused_result (ctxt);
10001 /* Maybe remove stores to variables we marked write-only.
10002 Return true if a store was removed. */
10003 static bool
10004 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
10005 bitmap dce_ssa_names)
10007 /* Keep the access when the store has side effects, i.e. when the
10008 source is volatile. */
10009 if (!gimple_store_p (stmt)
10010 || gimple_has_side_effects (stmt)
10011 || optimize_debug)
10012 return false;
10014 tree lhs = get_base_address (gimple_get_lhs (stmt));
10016 if (!VAR_P (lhs)
10017 || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
10018 || !varpool_node::get (lhs)->writeonly)
10019 return false;
10021 if (dump_file && (dump_flags & TDF_DETAILS))
10023 fprintf (dump_file, "Removing statement, writes"
10024 " to write only var:\n");
10025 print_gimple_stmt (dump_file, stmt, 0,
10026 TDF_VOPS|TDF_MEMSYMS);
10029 /* Mark the SSA name on the RHS so its definition gets checked by simple DCE. */
10030 if (gimple_assign_single_p (stmt))
10032 tree rhs = gimple_assign_rhs1 (stmt);
10033 if (TREE_CODE (rhs) == SSA_NAME
10034 && !SSA_NAME_IS_DEFAULT_DEF (rhs))
10035 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
10037 unlink_stmt_vdef (stmt);
10038 gsi_remove (&gsi, true);
10039 release_defs (stmt);
10040 return true;
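/* For example, given

     static int last_seen;
     void record (int v) { last_seen = v; }

   if nothing ever reads LAST_SEEN, the varpool marks it write-only and
   the store is deleted here; the RHS SSA name is queued so simple DCE
   can clean up any now-dead computation feeding it.  */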
10043 /* IPA passes, compilation of earlier functions or inlining
10044 might have changed some properties, such as marking functions nothrow,
10045 pure, const or noreturn.
10046 Remove redundant edges and basic blocks, and create new ones if necessary. */
10048 unsigned int
10049 execute_fixup_cfg (void)
10051 basic_block bb;
10052 gimple_stmt_iterator gsi;
10053 int todo = 0;
10054 cgraph_node *node = cgraph_node::get (current_function_decl);
10055 /* Same scaling is also done by ipa_merge_profiles. */
10056 profile_count num = node->count;
10057 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
10058 bool scale = num.initialized_p () && !(num == den);
10059 auto_bitmap dce_ssa_names;
10061 if (scale)
10063 profile_count::adjust_for_ipa_scaling (&num, &den);
10064 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
10065 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
10066 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
10069 FOR_EACH_BB_FN (bb, cfun)
10071 if (scale)
10072 bb->count = bb->count.apply_scale (num, den);
10073 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
10075 gimple *stmt = gsi_stmt (gsi);
10076 tree decl = is_gimple_call (stmt)
10077 ? gimple_call_fndecl (stmt)
10078 : NULL;
10079 if (decl)
10081 int flags = gimple_call_flags (stmt);
10082 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
10084 if (gimple_in_ssa_p (cfun))
10086 todo |= TODO_update_ssa | TODO_cleanup_cfg;
10087 update_stmt (stmt);
10090 if (flags & ECF_NORETURN
10091 && fixup_noreturn_call (stmt))
10092 todo |= TODO_cleanup_cfg;
10095 /* Remove stores to variables we marked write-only. */
10096 if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
10098 todo |= TODO_update_ssa | TODO_cleanup_cfg;
10099 continue;
10102 /* For calls we can simply remove the LHS when it is known
10103 to be write-only. */
10104 if (is_gimple_call (stmt)
10105 && gimple_get_lhs (stmt))
10107 tree lhs = get_base_address (gimple_get_lhs (stmt));
10109 if (VAR_P (lhs)
10110 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
10111 && varpool_node::get (lhs)->writeonly)
10113 gimple_call_set_lhs (stmt, NULL);
10114 update_stmt (stmt);
10115 todo |= TODO_update_ssa | TODO_cleanup_cfg;
10119 gsi_next (&gsi);
10121 if (gimple *last = *gsi_last_bb (bb))
10123 if (maybe_clean_eh_stmt (last)
10124 && gimple_purge_dead_eh_edges (bb))
10125 todo |= TODO_cleanup_cfg;
10126 if (gimple_purge_dead_abnormal_call_edges (bb))
10127 todo |= TODO_cleanup_cfg;
10130 /* If we have a basic block with no successors that does not
10131 end with a control statement or a noreturn call end it with
10132 a call to __builtin_unreachable. This situation can occur
10133 when inlining a noreturn call that does in fact return. */
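      /* An illustrative sketch (assumed user source, not code from this
	 file):

	   extern void noret (void) __attribute__ ((noreturn));
	   int f (void) { noret (); }

	 If NORET is inlined and its body in fact falls off the end, F is
	 left with a successor-less block whose last statement is neither
	 a control statement nor a noreturn call, so the code below
	 terminates the block with __builtin_unreachable ().  */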
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_nondebug_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_builtin_unreachable (UNKNOWN_LOCATION);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      if (!cfun->after_inlining)
		if (tree fndecl = gimple_call_fndecl (stmt))
		  {
		    gcall *call_stmt = dyn_cast <gcall *> (stmt);
		    node->create_edge (cgraph_node::get_create (fndecl),
				       call_stmt, bb->count);
		  }
	    }
	}
    }
  if (scale)
    {
      update_max_bb_count ();
      compute_function_frequency ();
    }

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  simple_dce_from_worklist (dce_ssa_names);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return execute_fixup_cfg ();
  }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
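
/* Note: fixup_cfg is not invoked directly; the pass manager instantiates
   it through make_pass_fixup_cfg for each entry of the form

     NEXT_PASS (pass_fixup_cfg);

   in passes.def (shown here for illustration).  */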

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), NULL, cookie);
  op (&(e->dest), NULL, cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), NULL, cookie);
  else
    op (&(e->insns.r), NULL, cookie);
  op (&(block), &(block), cookie);
}

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
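
/* Typical usage, mirrored by the tests below ("cfg_test_foo" stands in
   for a test-specific name):

     tree fndecl = push_fndecl ("cfg_test_foo");
     function *fun = DECL_STRUCT_FUNCTION (fndecl);
     ...create blocks and edges on FUN, run assertions...
     pop_cfun ();
*/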

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.cc:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple CFG of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS,
							 bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS,
							 bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS,
							 bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Run all of the selftests within this file.  */

void
tree_cfg_cc_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself (see the sketch below)
   - etc.  */
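
/* A minimal sketch of the "jumps to itself" case above, modeled on
   test_linear_chain.  This is only an assumed shape for such a test:
   the name test_self_loop is hypothetical and nothing here is wired
   into tree_cfg_cc_tests:

     static void
     test_self_loop ()
     {
       gimple_register_cfg_hooks ();

       tree fndecl = push_fndecl ("cfg_test_self_loop");
       function *fun = DECL_STRUCT_FUNCTION (fndecl);

       basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
       make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
       make_edge (bb_a, bb_a, 0);  // the jump-to-itself edge
       make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

       ASSERT_EQ (3, n_edges_for_fn (fun));
       ASSERT_EQ (2, bb_a->preds->length ());  // from ENTRY and itself
       ASSERT_EQ (2, bb_a->succs->length ());  // to EXIT and itself

       // The self-edge must not disturb immediate dominance: ENTRY
       // still immediately dominates BB_A.
       calculate_dominance_info (CDI_DOMINATORS);
       ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
                  get_immediate_dominator (CDI_DOMINATORS, bb_a));
       free_dominance_info (CDI_DOMINATORS);

       pop_cfun ();
     }  */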

#endif /* CHECKING_P */