/* Convert a program in SSA form into Normal form.
   Copyright (C) 2004-2025 Free Software Foundation, Inc.
   Contributed by Andrew Macleod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "ssa.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "tree-ssa-live.h"
#include "tree-ssa-ter.h"
#include "tree-ssa-coalesce.h"
#include "tree-outof-ssa.h"
#include "internal-fn.h"

/* FIXME: A lot of code here deals with expanding to RTL.  All that code
   should be in cfgexpand.cc.  */

/* Return TRUE if expression STMT is suitable for replacement.  */

bool
ssa_is_replaceable_p (gimple *stmt)
{
  use_operand_p use_p;
  tree def;
  gimple *use_stmt;
  gcall *call;

  /* Only consider modify stmts and direct internal fn calls that are
     not also tail-calls.  */
  if (!is_gimple_assign (stmt)
      && (!(call = dyn_cast <gcall *> (stmt))
	  || gimple_call_tail_p (call)
	  || !gimple_call_internal_p (call)
	  || !direct_internal_fn_p (gimple_call_internal_fn (call))))
    return false;

  /* If the statement may throw an exception, it cannot be replaced.  */
  if (stmt_could_throw_p (cfun, stmt))
    return false;

  /* Punt if there is more than 1 def.  */
  def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
  if (!def)
    return false;

  /* Only consider definitions which have a single use.  */
  if (!single_imm_use (def, &use_p, &use_stmt))
    return false;

  /* Used in this block, but at the TOP of the block, not the end.  */
  if (gimple_code (use_stmt) == GIMPLE_PHI)
    return false;

  /* There must be no VDEFs.  */
  if (gimple_vdef (stmt))
    return false;

  /* Float expressions must go through memory if float-store is on.  */
  if (flag_float_store
      && FLOAT_TYPE_P (TREE_TYPE (def)))
    return false;

  /* An assignment with a register variable on the RHS is not
     replaceable.  */
  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == VAR_DECL
      && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
    return false;

  /* Leave any stmt with volatile operands alone as well.  */
  if (gimple_has_volatile_ops (stmt))
    return false;

  return true;
}

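/* For illustration only (hypothetical example, not part of the original
   sources): given a GIMPLE sequence

       a_2 = b_1 + c_4;
       d_5 = a_2 * 2;

   the definition of a_2 passes every test above (single def, single use,
   no VDEF, nothing volatile), so Temporary Expression Replacement may
   forward "b_1 + c_4" into the use and expand d_5 = (b_1 + c_4) * 2 as
   one RTL expression.  */
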
/* Used to hold all the components required to do SSA PHI elimination.
   The node and pred/succ list is a simple linear list of nodes and
   edges represented as pairs of nodes.

   The predecessor and successor list:  Nodes are entered in pairs, where
   [0] ->PRED, [1]->SUCC.  All the even indexes in the array represent
   predecessors, all the odd elements are successors.

   Rationale:
   When implemented as bitmaps, the SSA->Normal times for very large
   programs were dominated by clearing the interference graph.

   Typically this list of edges is extremely small since it only includes
   PHI results and uses from a single edge which have not coalesced with
   each other.  This means that no virtual PHI nodes are included, and
   empirical evidence suggests that the number of edges rarely exceeds
   3, and in a bootstrap of GCC, the maximum size encountered was 7.
   This also limits the number of possible nodes that are involved to
   rarely more than 6, and in the bootstrap of gcc, the maximum number
   of nodes encountered was 12.  */

class elim_graph
{
public:
  elim_graph (var_map map);

  /* Size of the elimination vectors.  */
  int size;

  /* List of nodes in the elimination graph.  */
  auto_vec<int> nodes;

  /* The predecessor and successor edge list.  */
  auto_vec<int> edge_list;

  /* Source locus on each edge.  */
  auto_vec<location_t> edge_locus;

  /* Visited vector.  */
  auto_sbitmap visited;

  /* Stack for visited nodes.  */
  auto_vec<int> stack;

  /* The variable partition map.  */
  var_map map;

  /* Edge being eliminated by this graph.  */
  edge e;

  /* List of constant copies to emit.  These are pushed on in pairs.  */
  auto_vec<int> const_dests;
  auto_vec<tree> const_copies;

  /* Source locations for any constant copies.  */
  auto_vec<location_t> copy_locus;
};

/* For an edge E find out a good source location to associate with
   instructions inserted on edge E.  If E has an implicit goto set,
   use its location.  Otherwise search instructions in predecessors
   of E for a location, and use that one.  That makes sense because
   we insert on edges for PHI nodes, and effects of PHIs happen on
   the end of the predecessor conceptually.  An exception is made
   for EH edges because we don't want to drag the source location
   of unrelated statements at the beginning of handlers; they would
   be further reused for various EH constructs, which would damage
   the coverage information.  */
static void
set_location_for_edge (edge e)
{
  if (e->goto_locus)
    set_curr_insn_location (e->goto_locus);
  else if (e->flags & EDGE_EH)
    {
      basic_block bb = e->dest;
      gimple_stmt_iterator gsi;

      do
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      if (is_gimple_debug (stmt))
		continue;
	      if (gimple_has_location (stmt) || gimple_block (stmt))
		{
		  set_curr_insn_location (gimple_location (stmt));
		  return;
		}
	    }
	  /* Nothing found in this basic block.  Make a half-assed attempt
	     to continue with another block.  */
	  if (single_succ_p (bb))
	    bb = single_succ (bb);
	  else
	    bb = e->dest;
	}
      while (bb != e->dest);
    }
  else
    {
      basic_block bb = e->src;
      gimple_stmt_iterator gsi;

      do
	{
	  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      if (is_gimple_debug (stmt))
		continue;
	      if (gimple_has_location (stmt) || gimple_block (stmt))
		{
		  set_curr_insn_location (gimple_location (stmt));
		  return;
		}
	    }
	  /* Nothing found in this basic block.  Make a half-assed attempt
	     to continue with another block.  */
	  if (single_pred_p (bb))
	    bb = single_pred (bb);
	  else
	    bb = e->src;
	}
      while (bb != e->src);
    }
}

/* Emit insns to copy SRC into DEST converting SRC if necessary.  As
   SRC/DEST might be BLKmode memory locations SIZEEXP is a tree from
   which we deduce the size to copy in that case.  */

static inline rtx_insn *
emit_partition_copy (rtx dest, rtx src, int unsignedsrcp, tree sizeexp)
{
  start_sequence ();

  if (GET_MODE (src) != VOIDmode && GET_MODE (src) != GET_MODE (dest))
    src = convert_to_mode (GET_MODE (dest), src, unsignedsrcp);
  if (GET_MODE (src) == BLKmode)
    {
      gcc_assert (GET_MODE (dest) == BLKmode);
      emit_block_move (dest, src, expr_size (sizeexp), BLOCK_OP_NORMAL);
    }
  else
    emit_move_insn (dest, src);
  do_pending_stack_adjust ();

  rtx_insn *seq = get_insns ();
  end_sequence ();

  return seq;
}

/* Insert a copy instruction from partition SRC to DEST onto edge E.  */

static void
insert_partition_copy_on_edge (edge e, int dest, int src, location_t locus)
{
  tree var;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
	       "Inserting a partition copy on edge BB%d->BB%d : "
	       "PART.%d = PART.%d",
	       e->src->index,
	       e->dest->index, dest, src);
      fprintf (dump_file, "\n");
    }

  gcc_assert (SA.partition_to_pseudo[dest]);
  gcc_assert (SA.partition_to_pseudo[src]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  var = partition_to_var (SA.map, src);
  rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
				       copy_rtx (SA.partition_to_pseudo[src]),
				       TYPE_UNSIGNED (TREE_TYPE (var)),
				       var);

  insert_insn_on_edge (seq, e);
}

/* Insert a copy instruction from expression SRC to partition DEST
   onto edge E.  */

static void
insert_value_copy_on_edge (edge e, int dest, tree src, location_t locus)
{
  rtx dest_rtx, seq, x;
  machine_mode dest_mode, src_mode;
  int unsignedp;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
	       "Inserting a value copy on edge BB%d->BB%d : PART.%d = ",
	       e->src->index,
	       e->dest->index, dest);
      print_generic_expr (dump_file, src, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  dest_rtx = copy_rtx (SA.partition_to_pseudo[dest]);
  gcc_assert (dest_rtx);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  start_sequence ();

  tree name = partition_to_var (SA.map, dest);
  src_mode = TYPE_MODE (TREE_TYPE (src));
  dest_mode = GET_MODE (dest_rtx);
  gcc_assert (src_mode == TYPE_MODE (TREE_TYPE (name)));
  gcc_assert (!REG_P (dest_rtx)
	      || dest_mode == promote_ssa_mode (name, &unsignedp));

  if (src_mode != dest_mode)
    {
      x = expand_expr (src, NULL, src_mode, EXPAND_NORMAL);
      x = convert_modes (dest_mode, src_mode, x, unsignedp);
    }
  else if (src_mode == BLKmode)
    {
      x = dest_rtx;
      store_expr (src, x, 0, false, false);
    }
  else
    x = expand_expr (src, dest_rtx, dest_mode, EXPAND_NORMAL);

  if (x != dest_rtx)
    emit_move_insn (dest_rtx, x);
  do_pending_stack_adjust ();

  seq = get_insns ();
  end_sequence ();

  insert_insn_on_edge (seq, e);
}

/* Insert a copy instruction from RTL expression SRC to partition DEST
   onto edge E.  */

static void
insert_rtx_to_part_on_edge (edge e, int dest, rtx src, int unsignedsrcp,
			    location_t locus)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
	       "Inserting a temp copy on edge BB%d->BB%d : PART.%d = ",
	       e->src->index,
	       e->dest->index, dest);
      print_simple_rtl (dump_file, src);
      fprintf (dump_file, "\n");
    }

  gcc_assert (SA.partition_to_pseudo[dest]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  /* We give the destination as sizeexp in case src/dest are BLKmode
     mems.  Usually we give the source.  As we result from SSA names
     the left and right size should be the same (and no WITH_SIZE_EXPR
     involved), so it doesn't matter.  */
  rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
				       src, unsignedsrcp,
				       partition_to_var (SA.map, dest));

  insert_insn_on_edge (seq, e);
}

/* Insert a copy instruction from partition SRC to RTL lvalue DEST
   onto edge E.  */

static void
insert_part_to_rtx_on_edge (edge e, rtx dest, int src, location_t locus)
{
  tree var;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
	       "Inserting a temp copy on edge BB%d->BB%d : ",
	       e->src->index,
	       e->dest->index);
      print_simple_rtl (dump_file, dest);
      fprintf (dump_file, "= PART.%d\n", src);
    }

  gcc_assert (SA.partition_to_pseudo[src]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  var = partition_to_var (SA.map, src);
  rtx_insn *seq = emit_partition_copy (dest,
				       copy_rtx (SA.partition_to_pseudo[src]),
				       TYPE_UNSIGNED (TREE_TYPE (var)),
				       var);

  insert_insn_on_edge (seq, e);
}

/* Create an elimination graph for MAP.  */

elim_graph::elim_graph (var_map map) :
  nodes (30), edge_list (20), edge_locus (10), visited (map->num_partitions),
  stack (30), map (map), const_dests (20), const_copies (20), copy_locus (10)
{
}

/* Empty elimination graph G.  */

static inline void
clear_elim_graph (elim_graph *g)
{
  g->nodes.truncate (0);
  g->edge_list.truncate (0);
  g->edge_locus.truncate (0);
}


/* Return the number of nodes in graph G.  */

static inline int
elim_graph_size (elim_graph *g)
{
  return g->nodes.length ();
}

/* Add NODE to graph G, if it doesn't exist already.  */

static inline void
elim_graph_add_node (elim_graph *g, int node)
{
  int x;
  int t;

  FOR_EACH_VEC_ELT (g->nodes, x, t)
    if (t == node)
      return;
  g->nodes.safe_push (node);
}


/* Add the edge PRED->SUCC to graph G.  */

static inline void
elim_graph_add_edge (elim_graph *g, int pred, int succ, location_t locus)
{
  g->edge_list.safe_push (pred);
  g->edge_list.safe_push (succ);
  g->edge_locus.safe_push (locus);
}

/* Remove an edge from graph G for which NODE is the predecessor, and
   return the successor node.  -1 is returned if there is no such edge.  */

static inline int
elim_graph_remove_succ_edge (elim_graph *g, int node, location_t *locus)
{
  int y;
  unsigned x;
  for (x = 0; x < g->edge_list.length (); x += 2)
    if (g->edge_list[x] == node)
      {
	g->edge_list[x] = -1;
	y = g->edge_list[x + 1];
	g->edge_list[x + 1] = -1;
	*locus = g->edge_locus[x / 2];
	g->edge_locus[x / 2] = UNKNOWN_LOCATION;
	return y;
      }
  *locus = UNKNOWN_LOCATION;
  return -1;
}

/* Find all the nodes in GRAPH which are successors to NODE in the
   edge list.  VAR will hold the partition number found.  CODE is the
   code fragment executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, LOCUS, CODE)	\
do {								\
  unsigned x_;							\
  int y_;							\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {								\
      y_ = (GRAPH)->edge_list[x_];				\
      if (y_ != (NODE))						\
	continue;						\
      (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]);		\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);		\
      CODE;							\
    }								\
} while (0)

/* Find all the nodes which are predecessors of NODE in the edge list for
   GRAPH.  VAR will hold the partition number found.  CODE is the
   code fragment executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, LOCUS, CODE)	\
do {								\
  unsigned x_;							\
  int y_;							\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {								\
      y_ = (GRAPH)->edge_list[x_ + 1];				\
      if (y_ != (NODE))						\
	continue;						\
      (void) ((VAR) = (GRAPH)->edge_list[x_]);			\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);		\
      CODE;							\
    }								\
} while (0)

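/* Illustrative usage (hypothetical, not part of the original sources):
   visiting every predecessor P of partition T, together with the source
   location of the connecting edge, looks like

       FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
	 {
	   if (!bitmap_bit_p (g->visited, P))
	     ...;
	 });

   which is exactly the pattern the traversal routines below use.  */
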
/* Add T to elimination graph G.  */

static inline void
eliminate_name (elim_graph *g, int T)
{
  elim_graph_add_node (g, T);
}


/* Return true if this phi argument T should have a copy queued when using
   var_map MAP.  PHI nodes should contain only ssa_names and invariants.  A
   test for ssa_name is definitely simpler, but don't let invalid contents
   slip through in the meantime.  */

static inline bool
queue_phi_copy_p (var_map map, tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (var_to_partition (map, t) == NO_PARTITION)
	return true;
      return false;
    }
  gcc_checking_assert (is_gimple_min_invariant (t));
  return true;
}

/* Build elimination graph G for basic block BB on incoming PHI edge
   G->e.  */

static void
eliminate_build (elim_graph *g)
{
  tree Ti;
  int p0, pi;
  gphi_iterator gsi;

  clear_elim_graph (g);

  for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      location_t locus;

      p0 = var_to_partition (g->map, gimple_phi_result (phi));
      /* Ignore results which are not in partitions.  */
      if (p0 == NO_PARTITION)
	continue;

      Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
      /* See set_location_for_edge for the rationale.  */
      if (g->e->flags & EDGE_EH)
	locus = UNKNOWN_LOCATION;
      else
	locus = gimple_phi_arg_location_from_edge (phi, g->e);

      /* If this argument is a constant, or a SSA_NAME which is being
	 left in SSA form, just queue a copy to be emitted on this
	 edge.  */
      if (queue_phi_copy_p (g->map, Ti))
	{
	  /* Save constant copies until all other copies have been emitted
	     on this edge.  */
	  g->const_dests.safe_push (p0);
	  g->const_copies.safe_push (Ti);
	  g->copy_locus.safe_push (locus);
	}
      else
	{
	  pi = var_to_partition (g->map, Ti);
	  if (p0 != pi)
	    {
	      eliminate_name (g, p0);
	      eliminate_name (g, pi);
	      elim_graph_add_edge (g, p0, pi, locus);
	    }
	}
    }
}

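/* A worked example for orientation (hypothetical, not part of the
   original sources): suppose edge E enters a block containing

       x_9 = PHI <x_5(E), ...>
       y_8 = PHI <7(E), ...>

   with x_9 in partition P1, x_5 in P2 and y_8 in P3.  Building the graph
   for E adds nodes {P1, P2} and the edge P1->P2 (a register copy
   P1 = P2), while the constant copy P3 = 7 is queued on
   const_dests/const_copies so it is emitted only after all register
   copies.  */
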
/* Push successors of T onto the elimination stack for G.  */

static void
elim_forward (elim_graph *g, int T)
{
  int S;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
    {
      if (!bitmap_bit_p (g->visited, S))
	elim_forward (g, S);
    });
  g->stack.safe_push (T);
}


/* Return 1 if there are unvisited predecessors of T in graph G.  */

static int
elim_unvisited_predecessor (elim_graph *g, int T)
{
  int P;
  location_t locus;

  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      if (!bitmap_bit_p (g->visited, P))
	return 1;
    });
  return 0;
}

/* Process predecessors first, and insert a copy.  */

static void
elim_backward (elim_graph *g, int T)
{
  int P;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      if (!bitmap_bit_p (g->visited, P))
	{
	  elim_backward (g, P);
	  insert_partition_copy_on_edge (g->e, P, T, locus);
	}
    });
}

/* Allocate a new pseudo register usable for storing values sitting
   in NAME (a decl or SSA name), i.e. with matching mode and attributes.  */

static rtx
get_temp_reg (tree name)
{
  tree type = TREE_TYPE (name);
  int unsignedp;
  machine_mode reg_mode = promote_ssa_mode (name, &unsignedp);
  if (reg_mode == BLKmode)
    return assign_temp (type, 0, 0);
  rtx x = gen_reg_rtx (reg_mode);
  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
  return x;
}

/* Insert required copies for T in graph G.  Check for a strongly connected
   region, and create a temporary to break the cycle if one is found.  */

static void
elim_create (elim_graph *g, int T)
{
  int P, S;
  location_t locus;

  if (elim_unvisited_predecessor (g, T))
    {
      tree var = partition_to_var (g->map, T);
      rtx U = get_temp_reg (var);
      int unsignedsrcp = TYPE_UNSIGNED (TREE_TYPE (var));

      insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
      FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
	{
	  if (!bitmap_bit_p (g->visited, P))
	    {
	      elim_backward (g, P);
	      insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
	    }
	});
    }
  else
    {
      S = elim_graph_remove_succ_edge (g, T, &locus);
      if (S != -1)
	{
	  bitmap_set_bit (g->visited, T);
	  insert_partition_copy_on_edge (g->e, T, S, locus);
	}
    }
}

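/* Illustration of the cycle break (hypothetical, not part of the
   original sources): two PHIs whose copies on edge E amount to the swap

       P1 = P2
       P2 = P1

   form a strongly connected region.  elim_create spills one node into a
   fresh pseudo U from get_temp_reg, so the emitted sequence becomes

       U = P1;  P1 = P2;  P2 = U;

   which serializes the parallel copy without losing either value.  */
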
/* Eliminate all the phi nodes on edge E in graph G.  */

static void
eliminate_phi (edge e, elim_graph *g)
{
  int x;

  gcc_assert (g->const_copies.length () == 0);
  gcc_assert (g->copy_locus.length () == 0);

  /* Abnormal edges already have everything coalesced.  */
  if (e->flags & EDGE_ABNORMAL)
    return;

  g->e = e;

  eliminate_build (g);

  if (elim_graph_size (g) != 0)
    {
      int part;

      bitmap_clear (g->visited);
      g->stack.truncate (0);

      FOR_EACH_VEC_ELT (g->nodes, x, part)
	{
	  if (!bitmap_bit_p (g->visited, part))
	    elim_forward (g, part);
	}

      bitmap_clear (g->visited);
      while (g->stack.length () > 0)
	{
	  x = g->stack.pop ();
	  if (!bitmap_bit_p (g->visited, x))
	    elim_create (g, x);
	}
    }

  /* If there are any pending constant copies, issue them now.  */
  while (g->const_copies.length () > 0)
    {
      int dest;
      tree src;
      location_t locus;

      src = g->const_copies.pop ();
      dest = g->const_dests.pop ();
      locus = g->copy_locus.pop ();
      insert_value_copy_on_edge (e, dest, src, locus);
    }
}

/* Remove each argument from PHI.  If an arg was the last use of an SSA_NAME,
   check to see if this allows another PHI node to be removed.  */

static void
remove_gimple_phi_args (gphi *phi)
{
  use_operand_p arg_p;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Removing Dead PHI definition: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  /* Remove the reference to the existing argument.  */
	  SET_USE (arg_p, NULL_TREE);
	  if (has_zero_uses (arg))
	    {
	      gimple *stmt;
	      gimple_stmt_iterator gsi;

	      stmt = SSA_NAME_DEF_STMT (arg);

	      /* Also remove the def if it is a PHI node.  */
	      if (gimple_code (stmt) == GIMPLE_PHI)
		{
		  remove_gimple_phi_args (as_a <gphi *> (stmt));
		  gsi = gsi_for_stmt (stmt);
		  remove_phi_node (&gsi, true);
		}
	    }
	}
    }
}

/* Remove any PHI node which is a virtual PHI, or a PHI with no uses.  */

static void
eliminate_useless_phis (void)
{
  basic_block bb;
  gphi_iterator gsi;
  tree result;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
	{
	  gphi *phi = gsi.phi ();
	  result = gimple_phi_result (phi);
	  if (virtual_operand_p (result))
	    remove_phi_node (&gsi, true);
	  else
	    {
	      /* Also remove real PHIs with no uses.  */
	      if (has_zero_uses (result))
		{
		  remove_gimple_phi_args (phi);
		  remove_phi_node (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	}
    }
}

/* This function will rewrite the current program using the variable mapping
   found in MAP.  If the replacement vector VALUES is provided, any
   occurrences of partitions with non-null entries in the vector will be
   replaced with the expression in the vector instead of its mapped
   variable.  */

static void
rewrite_trees (var_map map)
{
  if (!flag_checking)
    return;

  basic_block bb;
  /* Search for PHIs where the destination has no partition, but one
     or more arguments has a partition.  This should not happen and can
     create incorrect code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
	  if (T0 == NULL_TREE)
	    {
	      size_t i;
	      for (i = 0; i < gimple_phi_num_args (phi); i++)
		{
		  tree arg = PHI_ARG_DEF (phi, i);

		  if (TREE_CODE (arg) == SSA_NAME
		      && var_to_partition (map, arg) != NO_PARTITION)
		    {
		      fprintf (stderr, "Argument of PHI is in a partition :(");
		      print_generic_expr (stderr, arg, TDF_SLIM);
		      fprintf (stderr, "), but the result is not :");
		      print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
		      internal_error ("SSA corruption");
		    }
		}
	    }
	}
    }
}

/* Create a default def for VAR.  */

static void
create_default_def (tree var, void *arg ATTRIBUTE_UNUSED)
{
  if (!is_gimple_reg (var))
    return;

  tree ssa = get_or_create_ssa_default_def (cfun, var);
  gcc_assert (ssa);
}

/* Call CALLBACK for all PARM_DECLs and RESULT_DECLs for which
   assign_parms may ask for a default partition.  */

static void
for_all_parms (void (*callback)(tree var, void *arg), void *arg)
{
  for (tree var = DECL_ARGUMENTS (current_function_decl); var;
       var = DECL_CHAIN (var))
    callback (var, arg);
  if (!VOID_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    callback (DECL_RESULT (current_function_decl), arg);
  if (cfun->static_chain_decl)
    callback (cfun->static_chain_decl, arg);
}

/* We need to pass two arguments to set_parm_default_def_partition,
   but for_all_parms only supports one.  Use a pair.  */

typedef std::pair<var_map, bitmap> parm_default_def_partition_arg;

/* Set in ARG's PARTS bitmap the bit corresponding to the partition in
   ARG's MAP containing VAR's default def.  */

static void
set_parm_default_def_partition (tree var, void *arg_)
{
  parm_default_def_partition_arg *arg = (parm_default_def_partition_arg *)arg_;
  var_map map = arg->first;
  bitmap parts = arg->second;

  if (!is_gimple_reg (var))
    return;

  tree ssa = ssa_default_def (cfun, var);
  gcc_assert (ssa);

  int version = var_to_partition (map, ssa);
  gcc_assert (version != NO_PARTITION);

  bool changed = bitmap_set_bit (parts, version);
  gcc_assert (changed);
}

/* Allocate and return a bitmap that has a bit set for each partition
   that contains a default def for a parameter.  */

bitmap
get_parm_default_def_partitions (var_map map)
{
  bitmap parm_default_def_parts = BITMAP_ALLOC (NULL);

  parm_default_def_partition_arg
    arg = std::make_pair (map, parm_default_def_parts);

  for_all_parms (set_parm_default_def_partition, &arg);

  return parm_default_def_parts;
}

/* Allocate and return a bitmap that has a bit set for each partition
   that contains an undefined value.  */

bitmap
get_undefined_value_partitions (var_map map)
{
  bitmap undefined_value_parts = BITMAP_ALLOC (NULL);

  for (unsigned int i = 1; i < num_ssa_names; i++)
    {
      tree var = ssa_name (i);
      if (var
	  && !virtual_operand_p (var)
	  && !has_zero_uses (var)
	  && ssa_undefined_value_p (var))
	{
	  const int p = var_to_partition (map, var);
	  if (p != NO_PARTITION)
	    bitmap_set_bit (undefined_value_parts, p);
	}
    }

  return undefined_value_parts;
}

/* Given the out-of-ssa info object SA (with prepared partitions)
   eliminate all phi nodes in all basic blocks.  Afterwards no
   basic block will have phi nodes anymore and there are possibly
   some RTL instructions inserted on edges.  */

void
expand_phi_nodes (struct ssaexpand *sa)
{
  basic_block bb;
  elim_graph g (sa->map);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    if (!gimple_seq_empty_p (phi_nodes (bb)))
      {
	edge e;
	edge_iterator ei;
	FOR_EACH_EDGE (e, ei, bb->preds)
	  eliminate_phi (e, &g);
	set_phi_nodes (bb, NULL);
	/* We can't redirect EH edges in RTL land, so we need to do this
	   here.  Redirection happens only when splitting is necessary,
	   which it is only for critical edges, normally.  For EH edges
	   it might also be necessary when the successor has more than
	   one predecessor.  In that case the edge is either required to
	   be fallthru (which EH edges aren't), or the predecessor needs
	   to end with a jump (which again, isn't the case with EH edges).
	   Hence, split all EH edges on which we inserted instructions
	   and whose successor has multiple predecessors.  */
	for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	  {
	    if (e->insns.r && (e->flags & EDGE_EH)
		&& !single_pred_p (e->dest))
	      {
		rtx_insn *insns = e->insns.r;
		basic_block bb;
		e->insns.r = NULL;
		bb = split_edge (e);
		single_pred_edge (bb)->insns.r = insns;
	      }
	    else
	      ei_next (&ei);
	  }
      }
}

/* Remove the ssa-names in the current function and translate them into normal
   compiler variables.  PERFORM_TER is true if Temporary Expression Replacement
   should also be used.  */

static void
remove_ssa_form (bool perform_ter, struct ssaexpand *sa)
{
  bitmap values = NULL;
  var_map map;

  for_all_parms (create_default_def, NULL);
  map = init_var_map (num_ssa_names);
  coalesce_ssa_name (map);

  /* Return to viewing the variable list as just all reference variables after
     coalescing has been performed.  */
  partition_view_normal (map);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "After Coalescing:\n");
      dump_var_map (dump_file, map);
    }

  if (perform_ter)
    {
      values = find_replaceable_exprs (map);
      if (values && dump_file && (dump_flags & TDF_DETAILS))
	dump_replaceable_exprs (dump_file, values);
    }

  rewrite_trees (map);

  sa->map = map;
  sa->values = values;
  sa->partitions_for_parm_default_defs = get_parm_default_def_partitions (map);
  sa->partitions_for_undefined_values = get_undefined_value_partitions (map);
}

/* If not already done so for basic block BB, assign increasing uids
   to each of its instructions.  */

static void
maybe_renumber_stmts_bb (basic_block bb)
{
  unsigned i = 0;
  gimple_stmt_iterator gsi;

  if (!bb->aux)
    return;
  bb->aux = NULL;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      gimple_set_uid (stmt, i);
      i++;
    }
}

/* Return true if we can determine that the SSA_NAMEs RESULT (a result
   of a PHI node) and ARG (one of its arguments) conflict.  Return false
   otherwise, also when we simply aren't sure.  */

static bool
trivially_conflicts_p (basic_block bb, tree result, tree arg)
{
  use_operand_p use;
  imm_use_iterator imm_iter;
  gimple *defa = SSA_NAME_DEF_STMT (arg);

  /* If ARG isn't defined in the same block it's too complicated for
     our little mind.  */
  if (gimple_bb (defa) != bb)
    return false;

  FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
    {
      gimple *use_stmt = USE_STMT (use);
      if (is_gimple_debug (use_stmt))
	continue;
      /* Now, if there's a use of RESULT that lies outside this basic block,
	 then there surely is a conflict with ARG.  */
      if (gimple_bb (use_stmt) != bb)
	return true;
      if (gimple_code (use_stmt) == GIMPLE_PHI)
	continue;
      /* The use now is in a real stmt of BB, so if ARG was defined
	 in a PHI node (like RESULT) both conflict.  */
      if (gimple_code (defa) == GIMPLE_PHI)
	return true;
      maybe_renumber_stmts_bb (bb);
      /* If the use of RESULT occurs after the definition of ARG,
	 the two conflict too.  */
      if (gimple_uid (defa) < gimple_uid (use_stmt))
	return true;
    }

  return false;
}

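/* Hypothetical example of the last test above (not part of the original
   sources): within one block

       arg_5 = ...;
       ... = result_2 + 1;    <-- use of RESULT after ARG's definition

   RESULT is still live when ARG is already defined, so the two names
   conflict and must not share a partition; trivially_conflicts_p
   detects this by comparing statement uids.  */
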
/* Search every PHI node for arguments associated with backedges which
   we can trivially determine will need a copy (the argument is either
   not an SSA_NAME or the argument has a different underlying variable
   than the PHI result).

   Insert a copy from the PHI argument to a new destination at the
   end of the block with the backedge to the top of the loop.  Update
   the PHI argument to reference this new destination.  */

static void
insert_backedge_copies (void)
{
  basic_block bb;
  gphi_iterator gsi;

  mark_dfs_back_edges ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Mark block as possibly needing calculation of UIDs.  */
      bb->aux = &bb->aux;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree result = gimple_phi_result (phi);
	  size_t i;

	  if (virtual_operand_p (result))
	    continue;

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      edge e = gimple_phi_arg_edge (phi, i);
	      /* We are only interested in copies emitted on critical
		 backedges.  */
	      if (!(e->flags & EDGE_DFS_BACK)
		  || !EDGE_CRITICAL_P (e))
		continue;

	      /* If the argument is not an SSA_NAME, then we will need a
		 constant initialization.  If the argument is an SSA_NAME then
		 a copy statement may be needed.  First handle the case
		 where we cannot insert before the argument definition.  */
	      if (TREE_CODE (arg) != SSA_NAME
		  || (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_PHI
		      && trivially_conflicts_p (bb, result, arg)))
		{
		  tree name;
		  gassign *stmt;
		  gimple *last = NULL;
		  gimple_stmt_iterator gsi2;

		  gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
		  if (!gsi_end_p (gsi2))
		    last = gsi_stmt (gsi2);

		  /* In theory the only way we ought to get back to the
		     start of a loop should be with a COND_EXPR or GOTO_EXPR.
		     However, better safe than sorry.
		     If the block ends with a control statement or
		     something that might throw, then we have to
		     insert this assignment before the last
		     statement.  Else insert it after the last statement.  */
		  if (last && stmt_ends_bb_p (last))
		    {
		      /* If the last statement in the block is the definition
			 site of the PHI argument, then we can't insert
			 anything after it.  */
		      if (TREE_CODE (arg) == SSA_NAME
			  && SSA_NAME_DEF_STMT (arg) == last)
			continue;
		    }

		  /* Create a new instance of the underlying variable of the
		     PHI result.  */
		  name = copy_ssa_name (result);
		  stmt = gimple_build_assign (name,
					      gimple_phi_arg_def (phi, i));

		  /* Copy location if present.  */
		  if (gimple_phi_arg_has_location (phi, i))
		    gimple_set_location (stmt,
					 gimple_phi_arg_location (phi, i));

		  /* Insert the new statement into the block and update
		     the PHI node.  */
		  if (last && stmt_ends_bb_p (last))
		    gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
		  else
		    gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
		  SET_PHI_ARG_DEF (phi, i, name);
		}
	      /* Insert a copy before the definition of the backedge value
		 and adjust all conflicting uses.  */
	      else if (trivially_conflicts_p (bb, result, arg))
		{
		  gimple *def = SSA_NAME_DEF_STMT (arg);
		  if (gimple_nop_p (def)
		      || gimple_code (def) == GIMPLE_PHI)
		    continue;
		  tree name = copy_ssa_name (result);
		  gimple *stmt = gimple_build_assign (name, result);
		  imm_use_iterator imm_iter;
		  gimple *use_stmt;
		  /* The following matches trivially_conflicts_p.  */
		  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, result)
		    {
		      if (gimple_bb (use_stmt) != bb
			  || (gimple_code (use_stmt) != GIMPLE_PHI
			      && (maybe_renumber_stmts_bb (bb), true)
			      && gimple_uid (use_stmt) > gimple_uid (def)))
			{
			  use_operand_p use;
			  FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
			    SET_USE (use, name);
			}
		    }
		  gimple_stmt_iterator gsi = gsi_for_stmt (def);
		  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
		}
	    }
	}

      /* Unmark this block again.  */
      bb->aux = NULL;
    }
}

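/* Sketch of the first case above (hypothetical names, not part of the
   original sources): given a header PHI whose backedge argument is a
   constant,

       x_1 = PHI <x_0(entry), 7(latch)>

   with a critical latch->header backedge, an assignment

       x_8 = 7;

   is placed at the end of the latch block and the argument is rewritten
   to x_8, so out-of-SSA need not split the backedge to emit the copy.  */
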
/* Remove indirect clobbers.  */

static void
remove_indirect_clobbers (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    for (auto gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
      {
	gimple *stmt = gsi_stmt (gsi);
	if (gimple_clobber_p (stmt))
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    if (TREE_CODE (lhs) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	      {
		unlink_stmt_vdef (stmt);
		gsi_remove (&gsi, true);
		release_defs (stmt);
		continue;
	      }
	  }
	gsi_next (&gsi);
      }
}

/* Free all memory associated with going out of SSA form.  SA is
   the outof-SSA info object.  */

void
finish_out_of_ssa (struct ssaexpand *sa)
{
  free (sa->partition_to_pseudo);
  if (sa->values)
    BITMAP_FREE (sa->values);
  delete_var_map (sa->map);
  BITMAP_FREE (sa->partitions_for_parm_default_defs);
  BITMAP_FREE (sa->partitions_for_undefined_values);
  memset (sa, 0, sizeof *sa);
}

/* Take the current function out of SSA form, translating PHIs as described in
   R. Morgan, ``Building an Optimizing Compiler'',
   Butterworth-Heinemann, Boston, MA, 1998.  pp 176-186.  */

void
rewrite_out_of_ssa (struct ssaexpand *sa)
{
  /* Remove remaining indirect clobbers as we do not need those anymore.
     Those might extend SSA lifetime and restrict coalescing.  */
  remove_indirect_clobbers ();

  /* If elimination of a PHI requires inserting a copy on a backedge,
     then we will have to split the backedge which has numerous
     undesirable performance effects.

     A significant number of such cases can be handled here by inserting
     copies into the loop itself.  */
  insert_backedge_copies ();

  /* Eliminate PHIs which are of no use, such as virtual or dead phis.  */
  eliminate_useless_phis ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);

  remove_ssa_form (flag_tree_ter, sa);