/* Combining of if-expressions on trees.
   Copyright (C) 2007-2025 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "attribs.h"
#include "asan.h"
#include "bitmap.h"
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif
/* Return FALSE iff the COND_BB ends with a conditional whose result is not a
   known constant.  */

static bool
known_succ_p (basic_block cond_bb)
{
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));

  if (!cond)
    return true;

  return (CONSTANT_CLASS_P (gimple_cond_lhs (cond))
          && CONSTANT_CLASS_P (gimple_cond_rhs (cond)));
}
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
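
/* For illustration only, a source-level sketch of the three shapes
   (variable names invented; the pass works on the GIMPLE/CFG form,
   not on source):

     1.  if (a & (1 << b)) if (a & (1 << c)) ...
           -> if ((a & T) == T) ...  with T = (1 << b) | (1 << c)

     2.  if (a & b) ... else if (a & c) ...
           -> if (a & (b | c)) ...

     3.  if (x < y) ... else if (x == y) ...
           -> if (x <= y) ...  */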
/* Recognize an if-then-else CFG pattern starting to match with the COND_BB
   basic-block containing the COND_EXPR.  If !SUCCS_ANY, the condition must not
   resolve to a constant for a match.  Returns true if the pattern matched,
   false otherwise.  In case of a !SUCCS_ANY match, the recognized then and
   else blocks are stored to *THEN_BB and *ELSE_BB.  If *THEN_BB and/or
   *ELSE_BB are already set, they are required to match the then and else
   basic-blocks to make the pattern match.  If SUCCS_ANY, *THEN_BB and *ELSE_BB
   will not be filled in, and they will be found to match even if reversed.  */
static bool
recognize_if_then_else (basic_block cond_bb,
                        basic_block *then_bb, basic_block *else_bb,
                        bool succs_any = false)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2
      || (!succs_any && known_succ_p (cond_bb)))
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);

  if (succs_any)
    return ((t->dest == *then_bb && e->dest == *else_bb)
            || (t->dest == *else_bb && e->dest == *then_bb));

  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}
/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      gassign *ass;
      enum tree_code rhs_code;
      if (gimple_has_side_effects (stmt)
          || gimple_could_trap_p (stmt)
          || gimple_vdef (stmt)
          /* We need to rewrite stmts with undefined overflow to use
             unsigned arithmetic but cannot do so for signed division.  */
          || ((ass = dyn_cast <gassign *> (stmt))
              && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (ass)))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (ass)))
              && ((rhs_code = gimple_assign_rhs_code (ass)), true)
              && (rhs_code == TRUNC_DIV_EXPR
                  || rhs_code == CEIL_DIV_EXPR
                  || rhs_code == FLOOR_DIV_EXPR
                  || rhs_code == ROUND_DIV_EXPR)
              /* We cannot use expr_not_equal_to since we'd have to restrict
                 flow-sensitive info to what's known at the outer if.  */
              && (TREE_CODE (gimple_assign_rhs2 (ass)) != INTEGER_CST
                  || !integer_minus_onep (gimple_assign_rhs2 (ass))))
          /* const calls don't match any of the above, yet they could
             still have some side-effects - they could contain
             gimple_could_trap_p statements, like floating point
             exceptions or integer division by zero.  See PR70586.
             FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
             should handle this.  */
          || is_gimple_call (stmt))
        return false;

      ssa_op_iter it;
      tree use;
      FOR_EACH_SSA_TREE_OPERAND (use, stmt, it, SSA_OP_USE)
        if (ssa_name_maybe_undef_p (use))
          return false;
    }

  return true;
}
/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
         && single_succ_p (bb)
         && single_succ (bb) == to_bb;
}
/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
                            PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
        return false;
    }

  return true;
}
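
/* For illustration (values invented): given a merge point DEST with
     x_1 = PHI <1(bb1), 1(bb2), 0(bb3)>
   same_phi_args_p (bb1, bb2, DEST) holds because both edges contribute
   the same argument 1, whereas same_phi_args_p (bb1, bb3, DEST) would
   not (1 vs 0), so those edges could not be merged.  */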
/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
        {
          if (TYPE_PRECISION (TREE_TYPE (candidate))
              <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
            return gimple_assign_rhs1 (def_stmt);
        }
    }

  return candidate;
}
/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
             && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
                      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
                 || gimple_assign_ssa_name_copy_p (stmt)))
        stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1) */
          *bit = gimple_assign_rhs2 (stmt);
          *name = gimple_assign_rhs1 (stmt);
        }
      else
        {
          /* t & 1 */
          *bit = integer_zero_node;
          *name = get_name_for_bit_test (orig_name);
        }

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
                            tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs2 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs1 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
    }

  return false;
}
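
/* For illustration (names invented), the forms recognized above
   correspond to sources like:

     if ((x >> b) & 1)    -> *name = x, *bit = b
     if (x & 8)           -> *name = x, *bit = 3
     if (x & (1 << b))    -> *name = x, *bit = b  */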
/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
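
/* For illustration (names invented): if (x & 0xf0) matches with
   *name = x and *bits = 0xf0.  */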
/* Update profile after code in either outer_cond_bb or inner_cond_bb was
   adjusted so that it has no condition.  */

static void
update_profile_after_ifcombine (basic_block inner_cond_bb,
                                basic_block outer_cond_bb)
{
  /* In the following we assume that inner_cond_bb has single predecessor.  */
  gcc_assert (single_pred_p (inner_cond_bb));

  basic_block outer_to_inner_bb = inner_cond_bb;
  profile_probability prob = profile_probability::always ();
  for (;;)
    {
      basic_block parent = single_pred (outer_to_inner_bb);
      prob *= find_edge (parent, outer_to_inner_bb)->probability;
      if (parent == outer_cond_bb)
        break;
      outer_to_inner_bb = parent;
    }

  edge outer_to_inner = find_edge (outer_cond_bb, outer_to_inner_bb);
  edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
                 ? EDGE_SUCC (outer_cond_bb, 1)
                 : EDGE_SUCC (outer_cond_bb, 0));
  edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
  edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);

  if (inner_taken->dest != outer2->dest)
    std::swap (inner_taken, inner_not_taken);
  gcc_assert (inner_taken->dest == outer2->dest);

  if (outer_to_inner_bb == inner_cond_bb
      && known_succ_p (outer_cond_bb))
    {
      /* Path outer_cond_bb->(outer2) needs to be merged into path
         outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
         and probability of inner_not_taken updated.  */

      inner_cond_bb->count = outer_cond_bb->count;

      /* Handle special case where inner_taken probability is always.  In this
         case we know that the overall outcome will be always as well, but
         combining probabilities will be conservative because it does not know
         that outer2->probability is inverse of
         outer_to_inner->probability.  */
      if (inner_taken->probability == profile_probability::always ())
        ;
      else
        inner_taken->probability = outer2->probability
          + outer_to_inner->probability * inner_taken->probability;
      inner_not_taken->probability = profile_probability::always ()
        - inner_taken->probability;

      outer_to_inner->probability = profile_probability::always ();
      outer2->probability = profile_probability::never ();
    }
  else if (known_succ_p (inner_cond_bb))
    {
      /* Path inner_cond_bb->(inner_taken) needs to be merged into path
         outer_cond_bb->(outer2).  We've accumulated the probabilities from
         outer_cond_bb->(outer)->...->inner_cond_bb in prob, so we have to
         adjust that by inner_taken, and make inner unconditional.  */

      prob *= inner_taken->probability;
      outer2->probability += prob;
      outer_to_inner->probability = profile_probability::always ()
        - outer2->probability;

      inner_taken->probability = profile_probability::never ();
      inner_not_taken->probability = profile_probability::always ();
    }
  else
    {
      /* We've moved part of the inner cond to outer, but we don't know the
         probabilities for each part, so estimate the effects by moving half of
         the odds of inner_taken to outer.  */

      inner_taken->probability *= profile_probability::even ();
      inner_not_taken->probability = profile_probability::always ()
        - inner_taken->probability;

      prob *= inner_taken->probability;
      outer2->probability += prob;
      outer_to_inner->probability = profile_probability::always ()
        - outer2->probability;
    }
}
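
/* For intuition about the fallback estimate above, with invented
   numbers: if the path from outer_cond_bb to inner_cond_bb carried
   probability 50% and inner_taken 40%, halving leaves inner_taken at
   20%, and 50% * 20% = 10% is shifted over to outer2.  */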
/* Set NAME's bit in USED if OUTER dominates it.  */

static void
ifcombine_mark_ssa_name (bitmap used, tree name, basic_block outer)
{
  if (!name || TREE_CODE (name) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (name))
    return;

  gimple *def = SSA_NAME_DEF_STMT (name);
  basic_block bb = gimple_bb (def);
  if (!dominated_by_p (CDI_DOMINATORS, bb, outer))
    return;

  bitmap_set_bit (used, SSA_NAME_VERSION (name));
}
/* Data structure passed to ifcombine_mark_ssa_name.  */
struct ifcombine_mark_ssa_name_t
{
  /* SSA_NAMEs that have been referenced.  */
  bitmap used;

  /* Dominating block of DEFs that might need moving.  */
  basic_block outer;
};

/* Mark in DATA->used any SSA_NAMEs used in *T.  */

static tree
ifcombine_mark_ssa_name_walk (tree *t, int *, void *data_)
{
  ifcombine_mark_ssa_name_t *data = (ifcombine_mark_ssa_name_t *)data_;

  ifcombine_mark_ssa_name (data->used, *t, data->outer);

  return NULL_TREE;
}
/* Rewrite a stmt, that presumably used to be guarded by conditions that could
   avoid undefined overflow, into one that has well-defined overflow, so that
   it won't invoke undefined behavior once the guarding conditions change.  */

static void
ifcombine_rewrite_to_defined_overflow (gimple_stmt_iterator gsi)
{
  gassign *ass = dyn_cast <gassign *> (gsi_stmt (gsi));
  if (!ass)
    return;

  tree lhs = gimple_assign_lhs (ass);
  if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
       || POINTER_TYPE_P (TREE_TYPE (lhs)))
      && arith_code_with_undefined_signed_overflow
           (gimple_assign_rhs_code (ass)))
    rewrite_to_defined_overflow (&gsi);
}
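
/* E.g. (illustrative): a signed addition whose guard is about to change
   would be rewritten by rewrite_to_defined_overflow to compute in the
   corresponding unsigned type, roughly
     tem = (unsigned int) x + (unsigned int) y;  lhs = (int) tem;
   so that wrapping no longer invokes undefined behavior.  */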
/* Replace the conditions in INNER_COND and OUTER_COND with COND and COND2.
   COND and COND2 are computed for insertion at INNER_COND, with OUTER_COND
   replaced with a constant, but if there are intervening blocks, it's best to
   adjust COND for insertion at OUTER_COND, placing COND2 at INNER_COND.  */

static bool
ifcombine_replace_cond (gcond *inner_cond, bool inner_inv,
                        gcond *outer_cond, bool outer_inv,
                        tree cond, bool must_canon, tree cond2)
{
  bool split_single_cond = false;
  /* Split cond into cond2 if they're contiguous.  ??? We might be able to
     handle ORIF as well, inverting both conditions, but it's not clear that
     this would be enough, and it never comes up.  */
  if (!cond2
      && TREE_CODE (cond) == TRUTH_ANDIF_EXPR
      && single_pred (gimple_bb (inner_cond)) == gimple_bb (outer_cond))
    {
      cond2 = TREE_OPERAND (cond, 1);
      cond = TREE_OPERAND (cond, 0);
      split_single_cond = true;
    }

  bool outer_p = cond2 || (single_pred (gimple_bb (inner_cond))
                           != gimple_bb (outer_cond));
  bool result_inv = outer_p ? outer_inv : inner_inv;
  bool strictening_outer_cond = !split_single_cond && outer_p;

  if (result_inv)
    cond = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (cond), cond);

  if (tree tcanon = canonicalize_cond_expr_cond (cond))
    cond = tcanon;
  else if (must_canon)
    return false;

  if (outer_p)
    {
      auto_bitmap used;
      basic_block outer_bb = gimple_bb (outer_cond);

      bitmap_tree_view (used);

      /* Mark SSA DEFs that are referenced by cond and may thus need to be
         moved to outer.  */
      ifcombine_mark_ssa_name_t data = { used, outer_bb };
      walk_tree (&cond, ifcombine_mark_ssa_name_walk, &data, NULL);

      if (!bitmap_empty_p (used))
        {
          const int max_stmts = 6;
          auto_vec<gimple *, max_stmts> stmts;

          /* Iterate up from inner_cond, moving DEFs identified as used by
             cond, and marking USEs in the DEFs for moving as well.  */
          for (basic_block bb = gimple_bb (inner_cond);
               bb != outer_bb; bb = single_pred (bb))
            {
              for (gimple_stmt_iterator gsitr = gsi_last_bb (bb);
                   !gsi_end_p (gsitr); gsi_prev (&gsitr))
                {
                  gimple *stmt = gsi_stmt (gsitr);

                  bool move = false;
                  tree t;
                  ssa_op_iter it;
                  FOR_EACH_SSA_TREE_OPERAND (t, stmt, it, SSA_OP_DEF)
                    if (bitmap_bit_p (used, SSA_NAME_VERSION (t)))
                      {
                        move = true;
                        break;
                      }

                  if (!move)
                    continue;

                  if (stmts.length () < max_stmts)
                    stmts.quick_push (stmt);
                  else
                    return false;

                  /* Mark uses in STMT before moving it.  */
                  FOR_EACH_SSA_TREE_OPERAND (t, stmt, it, SSA_OP_USE)
                    ifcombine_mark_ssa_name (used, t, outer_bb);
                }

              /* Surprisingly, there may be PHI nodes in single-predecessor
                 blocks, as in pr50682.C.  Fortunately, since they can't
                 involve back edges, there won't be references to parallel
                 nodes that we'd have to pay special attention to to keep
                 them parallel.  We can't move the PHI nodes, but we can turn
                 them into assignments.  */
              for (gphi_iterator gsi = gsi_start_phis (bb);
                   !gsi_end_p (gsi); gsi_next (&gsi))
                {
                  gphi *phi = gsi.phi ();

                  gcc_assert (gimple_phi_num_args (phi) == 1);
                  tree def = gimple_phi_result (phi);

                  if (!bitmap_bit_p (used, SSA_NAME_VERSION (def)))
                    continue;

                  if (stmts.length () < max_stmts)
                    stmts.quick_push (phi);
                  else
                    return false;

                  /* Mark uses in STMT before moving it.  */
                  use_operand_p use_p;
                  ssa_op_iter it;
                  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
                    ifcombine_mark_ssa_name (used, USE_FROM_PTR (use_p),
                                             outer_bb);
                }
            }

          /* ??? Test whether it makes sense to move STMTS.  */

          /* Move the STMTS that need moving.  From this point on, we're
             committing to the attempted ifcombine.  */
          gimple_stmt_iterator gsins = gsi_for_stmt (outer_cond);
          unsigned i;
          gimple *stmt;
          FOR_EACH_VEC_ELT (stmts, i, stmt)
            {
              if (gphi *phi = dyn_cast <gphi *> (stmt))
                {
                  tree def = gimple_phi_result (phi);
                  tree use = gimple_phi_arg_def (phi, 0);
                  location_t loc = gimple_phi_arg_location (phi, 0);

                  gphi_iterator gsi = gsi_for_phi (phi);
                  remove_phi_node (&gsi, false);

                  gassign *a = gimple_build_assign (def, use);
                  gimple_set_location (a, loc);
                  gsi_insert_before (&gsins, a, GSI_NEW_STMT);
                }
              else
                {
                  gimple_stmt_iterator gsitr = gsi_for_stmt (stmt);
                  gsi_move_before (&gsitr, &gsins, GSI_NEW_STMT);
                }
            }

          for (; gsi_stmt (gsins) != outer_cond; gsi_next (&gsins))
            {
              /* Clear range info from all defs we've moved from under
                 conditions.  */
              tree t;
              ssa_op_iter it;
              FOR_EACH_SSA_TREE_OPERAND (t, gsi_stmt (gsins), it, SSA_OP_DEF)
                reset_flow_sensitive_info (t);
              /* Avoid introducing undefined overflows while at that.  */
              ifcombine_rewrite_to_defined_overflow (gsins);
            }
        }

      if (!is_gimple_condexpr_for_cond (cond))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (outer_cond);
          cond = force_gimple_operand_gsi_1 (&gsi, cond,
                                             is_gimple_condexpr_for_cond,
                                             NULL, true, GSI_SAME_STMT);
        }

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond, cond);
      update_stmt (outer_cond);

      if (cond2)
        {
          if (inner_inv)
            cond2 = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (cond2), cond2);

          if (tree tcanon = canonicalize_cond_expr_cond (cond2))
            cond2 = tcanon;
          if (!is_gimple_condexpr_for_cond (cond2))
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (inner_cond);
              cond2 = force_gimple_operand_gsi_1 (&gsi, cond2,
                                                  is_gimple_condexpr_for_cond,
                                                  NULL, true, GSI_SAME_STMT);
            }
          gimple_cond_set_condition_from_tree (inner_cond, cond2);
        }
      else
        gimple_cond_set_condition_from_tree (inner_cond,
                                             result_inv
                                             ? boolean_false_node
                                             : boolean_true_node);
      update_stmt (inner_cond);
    }
  else
    {
      if (!is_gimple_condexpr_for_cond (cond))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (inner_cond);
          cond = force_gimple_operand_gsi_1 (&gsi, cond,
                                             is_gimple_condexpr_for_cond,
                                             NULL, true, GSI_SAME_STMT);
        }
      gimple_cond_set_condition_from_tree (inner_cond, cond);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
                                           result_inv
                                           ? boolean_false_node
                                           : boolean_true_node);
      update_stmt (outer_cond);
    }

  /* We're changing conditions that guard inner blocks, so reset flow sensitive
     info and avoid introducing undefined behavior.  */
  for (basic_block bb = gimple_bb (inner_cond), end = gimple_bb (outer_cond);
       bb != end; bb = single_pred (bb))
    {
      /* Clear range info from all stmts in BB which is now guarded by
         different conditionals.  */
      reset_flow_sensitive_info_in_bb (gimple_bb (inner_cond));

      /* We only need to worry about introducing undefined behavior if we've
         relaxed the outer condition.  */
      if (strictening_outer_cond)
        continue;

      /* Avoid introducing undefined behavior as we move stmts that used to be
         guarded by OUTER_COND.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (gimple_bb (inner_cond));
           !gsi_end_p (gsi); gsi_next (&gsi))
        ifcombine_rewrite_to_defined_overflow (gsi);
    }

  update_profile_after_ifcombine (gimple_bb (inner_cond),
                                  gimple_bb (outer_cond));

  return true;
}
/* Returns true if INNER_COND_BB contains just the condition, or up to two
   statements that define its LHS or RHS with an integer conversion.  */

static bool
can_combine_bbs_with_short_circuit (basic_block inner_cond_bb, tree lhs,
                                    tree rhs)
{
  gimple_stmt_iterator gsi;
  gsi = gsi_start_nondebug_after_labels_bb (inner_cond_bb);
  /* If only the condition, this should be allowed.  */
  if (gsi_one_before_end_p (gsi))
    return true;
  /* Can have up to 2 statements defining each of lhs/rhs.  */
  for (int i = 0; i < 2; i++)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (!is_gimple_assign (stmt)
          || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
        return false;
      /* The defining statement needs to match either the lhs or rhs of
         the condition.  */
      if (lhs != gimple_assign_lhs (stmt)
          && rhs != gimple_assign_lhs (stmt))
        return false;
      gsi_next_nondebug (&gsi);
      if (gsi_one_before_end_p (gsi))
        return true;
    }
  return false;
}
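
/* For illustration (SSA names invented), a shape accepted above:
     <inner_cond_bb>
     _1 = (int) c_2(D);
     _3 = (int) d_4(D);
     if (_1 == _3) ...
   where both statements are nop conversions defining the lhs and rhs
   of the condition.  */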
/* If-convert on a and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   INNER_INV, OUTER_INV indicate whether the conditions are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
                   basic_block outer_cond_bb, bool outer_inv)
{
  gimple_stmt_iterator gsi;
  tree name1, name2, bit1, bit2, bits1, bits2;

  gcond *inner_cond = safe_dyn_cast <gcond *> (*gsi_last_bb (inner_cond_bb));
  if (!inner_cond)
    return false;

  gcond *outer_cond = safe_dyn_cast <gcond *> (*gsi_last_bb (outer_cond_bb));
  if (!outer_cond)
    return false;
  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      if (TREE_CODE (name1) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name1))
        return false;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
                                     true, GSI_SAME_STMT);
      t = fold_build2 (EQ_EXPR, boolean_type_node, t2, t);

      if (!ifcombine_replace_cond (inner_cond, inner_inv,
                                   outer_cond, outer_inv,
                                   t, true, NULL_TREE))
        return false;

      if (dump_file)
        {
          fprintf (dump_file, "optimizing double bit test to ");
          print_generic_expr (dump_file, name1);
          fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
          print_generic_expr (dump_file, bit1);
          fprintf (dump_file, ") | (1 << ");
          print_generic_expr (dump_file, bit2);
          fprintf (dump_file, ")\n");
        }

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
           && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      tree t;

      if ((TREE_CODE (name1) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name1))
          || (TREE_CODE (name2) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name2)))
        return false;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
        ;
      else if (bits1 == bits2)
        {
          std::swap (name2, bits2);
          std::swap (name1, bits1);
        }
      else if (name1 == bits2)
        std::swap (name2, bits2);
      else if (bits1 == name2)
        std::swap (name1, bits1);
      else
        goto bits_test_failed;

      /* As we strip non-widening conversions in finding a common
         name that is tested make sure to end up with an integral
         type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
          >= TYPE_PRECISION (TREE_TYPE (bits2)))
        {
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          name1 = fold_convert (TREE_TYPE (bits1), name1);
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          bits2 = fold_convert (TREE_TYPE (bits1), bits2);
        }
      else
        {
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          name1 = fold_convert (TREE_TYPE (bits2), name1);
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          bits1 = fold_convert (TREE_TYPE (bits2), bits1);
        }

      /* Do it.  */
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = fold_build2 (EQ_EXPR, boolean_type_node, t,
                       build_int_cst (TREE_TYPE (t), 0));
      if (!ifcombine_replace_cond (inner_cond, inner_inv,
                                   outer_cond, outer_inv,
                                   t, false, NULL_TREE))
        return false;

      if (dump_file)
        {
          fprintf (dump_file, "optimizing bits or bits test to ");
          print_generic_expr (dump_file, name1);
          fprintf (dump_file, " & T != 0\nwith temporary T = ");
          print_generic_expr (dump_file, bits1);
          fprintf (dump_file, " | ");
          print_generic_expr (dump_file, bits2);
          fprintf (dump_file, "\n");
        }

      return true;
    }
  /* See if we have two comparisons that we can merge into one.  */
  else bits_test_failed:
    if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
        && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t, ts = NULL_TREE;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
        inner_cond_code = invert_tree_comparison (inner_cond_code,
          HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
        return false;
      if (outer_inv)
        outer_cond_code = invert_tree_comparison (outer_cond_code,
          HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
        return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (boolean_type_node, inner_cond_code,
                                            gimple_cond_lhs (inner_cond),
                                            gimple_cond_rhs (inner_cond),
                                            outer_cond_code,
                                            gimple_cond_lhs (outer_cond),
                                            gimple_cond_rhs (outer_cond),
                                            gimple_bb (outer_cond)))
          && !(t = (fold_truth_andor_for_ifcombine
                    (TRUTH_ANDIF_EXPR, boolean_type_node,
                     gimple_location (outer_cond),
                     outer_cond_code,
                     gimple_cond_lhs (outer_cond),
                     gimple_cond_rhs (outer_cond),
                     gimple_location (inner_cond),
                     inner_cond_code,
                     gimple_cond_lhs (inner_cond),
                     gimple_cond_rhs (inner_cond),
                     single_pred (inner_cond_bb) != outer_cond_bb
                     ? &ts : NULL))))
        {
          /* Only combine conditions in this fallback case if the blocks are
             contiguous.  */
          if (single_pred (inner_cond_bb) != outer_cond_bb)
            return false;
          bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
          if (param_logical_op_non_short_circuit != -1)
            logical_op_non_short_circuit
              = param_logical_op_non_short_circuit;
          if (!logical_op_non_short_circuit || sanitize_coverage_p ())
            return false;
          /* Only do this optimization if the inner bb contains only the
             conditional or there are one or two statements which are nop
             conversions for the comparison.  */
          if (!can_combine_bbs_with_short_circuit (inner_cond_bb,
                                                   gimple_cond_lhs (inner_cond),
                                                   gimple_cond_rhs (inner_cond)))
            return false;
          tree t1, t2;
          t1 = fold_build2_loc (gimple_location (inner_cond),
                                inner_cond_code, boolean_type_node,
                                gimple_cond_lhs (inner_cond),
                                gimple_cond_rhs (inner_cond));
          t2 = fold_build2_loc (gimple_location (outer_cond),
                                outer_cond_code, boolean_type_node,
                                gimple_cond_lhs (outer_cond),
                                gimple_cond_rhs (outer_cond));
          t = fold_build2_loc (gimple_location (inner_cond),
                               TRUTH_AND_EXPR, boolean_type_node, t1, t2);
        }

      if (!ifcombine_replace_cond (inner_cond, inner_inv,
                                   outer_cond, outer_inv,
                                   t, false, ts))
        return false;

      if (dump_file)
        {
          fprintf (dump_file, "optimizing two comparisons to ");
          print_generic_expr (dump_file, t);
          if (ts)
            {
              fprintf (dump_file, " and ");
              print_generic_expr (dump_file, ts);
            }
          fprintf (dump_file, "\n");
        }

      return true;
    }

  return false;
}
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.
   OUTER_SUCC_BB is the successor of OUTER_COND_BB on the path towards
   INNER_COND_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
                         basic_block then_bb, basic_block else_bb,
                         basic_block phi_pred_bb, basic_block outer_succ_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergeable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &outer_succ_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto else_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &outer_succ_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto else_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergeable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &outer_succ_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto then_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &outer_succ_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto then_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false);
    }

  return false;
}
/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  bool ret = false;
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
         ;
     and
       if (a && b)
         ;
     This requires a single predecessor of the inner cond_bb.

     Look for OUTER_COND_BBs to combine with INNER_COND_BB.  They need not
     be contiguous, as long as inner and intervening blocks have no side
     effects, and are either single-entry-single-exit or conditionals choosing
     between the same EXIT_BB with the same PHI args, possibly through an
     EXIT_PRED, and the path leading to INNER_COND_BB.  EXIT_PRED will be set
     just before (along with a successful combination) or just after setting
     EXIT_BB, to either THEN_BB, ELSE_BB, or INNER_COND_BB.  ??? We could
     potentially handle multi-block single-entry-single-exit regions, but the
     loop below only deals with single-entry-single-exit individual intervening
     blocks.  Larger regions without side effects are presumably rare, so it's
     probably not worth the effort.  */
  for (basic_block bb = inner_cond_bb, outer_cond_bb, exit_bb = NULL,
         /* This initialization shouldn't be needed, but in case the compiler
            is not smart enough to tell, make it harmless.  */
         exit_pred = NULL;
       single_pred_p (bb) && bb_no_side_effects_p (bb);
       bb = outer_cond_bb)
    {
      bool changed = false;

      outer_cond_bb = single_pred (bb);

      /* Skip blocks without conditions.  */
      if (single_succ_p (outer_cond_bb))
        continue;

      /* When considering noncontiguous conditions, make sure that all
         non-final conditions lead to the same successor of the final
         condition, when not taking the path to inner_bb, so that we can
         combine C into A, both in A && (B && C), and in A || (B || C), but
         neither in A && (B || C), nor A || (B && C).  Say, if C goes to
         THEN_BB or ELSE_BB, then B must go to either of these, say X, besides
         C (whether C is then or else), and A must go to X and B (whether then
         or else).

         We test for this, while allowing intervening nonconditional blocks, by
         first taking note of which of the successors of the inner conditional
         block is the exit path taken by the first considered outer conditional
         block.

         Having identified and saved the exit block in EXIT_BB at the end of
         the loop, here we test that subsequent conditional blocks under
         consideration also use the exit block as a successor, besides the
         block that leads to inner_cond_bb, and that the edges to exit share
         the same phi values.  */
      if (exit_bb
          && !recognize_if_then_else (outer_cond_bb, &bb, &exit_bb, true))
        break;

      /* After checking dests and phi args, we can also skip blocks whose
         conditions have been optimized down to a constant, without trying to
         combine them, but we must not skip the computation of EXIT_BB and the
         checking of same phi args.  */
      if (known_succ_p (outer_cond_bb))
        ;
      else if ((!exit_bb || exit_pred == inner_cond_bb)
               && tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
                                           then_bb, else_bb, inner_cond_bb, bb))
        changed = true, exit_pred = inner_cond_bb;
      else if (exit_bb
               ? exit_pred == else_bb
               : forwarder_block_to (else_bb, then_bb))
        {
          /* Other possibilities for the && form, if else_bb is
             empty forwarder block to then_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, else_bb, bb))
            changed = true, exit_pred = else_bb;
        }
      else if (exit_bb
               ? exit_pred == then_bb
               : forwarder_block_to (then_bb, else_bb))
        {
          /* Other possibilities for the || form, if then_bb is
             empty forwarder block to else_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, then_bb, bb))
            changed = true, exit_pred = then_bb;
        }

      if (changed)
        ret = true;

      /* If the inner condition is gone, there's no point in attempting to
         combine it any further.  */
      if (changed && known_succ_p (inner_cond_bb))
        break;

      /* Starting at this point in the loop, we start preparing to attempt
         combinations in which OUTER_COND_BB will be an intervening block.
         Checking that it has a single predecessor is a very cheap test, unlike
         the PHI args tests below, so test it early and hopefully save the more
         expensive tests in case we won't be able to try other blocks.  */
      if (!single_pred_p (outer_cond_bb))
        break;

      /* Record the exit path taken by the outer condition.  */
      if (!exit_bb)
        {
          /* If we have removed the outer condition entirely, we need not
             commit to an exit block yet, it's as if we'd merged the blocks and
             were starting afresh.  This is sound as long as we never replace
             the outer condition with a constant that leads away from the inner
             block.  Here's why we never do: when combining contiguous
             conditions, we replace the inner cond, and replace the outer cond
             with a constant that leads to inner, so this case is good.  When
             combining noncontiguous blocks, we normally modify outer, and
             replace inner with a constant or remainders of the original
             condition that couldn't be combined.  This test would normally not
             hit with noncontiguous blocks, because we'd have computed EXIT_BB
             before reaching the noncontiguous outer block.  However, if all
             intervening blocks are unconditional, including those just made
             unconditional, we may replace outer instead of inner with the
             combined condition.  If the combined noncontiguous conditions are
             mutually exclusive, we could end up with a constant outer
             condition, but then, the inner condition would also be a constant,
             and then we'd stop iterating because of the known_succ_p
             (inner_cond_bb) test above.  */
          if (changed && known_succ_p (outer_cond_bb))
            continue;

          if (recognize_if_then_else (outer_cond_bb, &then_bb, &bb, true))
            exit_bb = then_bb;
          else if (recognize_if_then_else (outer_cond_bb, &bb, &else_bb, true))
            exit_bb = else_bb;
          else
            break;

          /* Find out which path from INNER_COND_BB shares PHI args with the
             edge (OUTER_COND_BB->EXIT_BB).  That path may involve a forwarder
             block, whether THEN_BB or ELSE_BB, and we need to know which one
             satisfies the condition to avoid combinations that could use
             different forwarding arrangements, because they would be unsound.
             E.g., given (a ? 0 : b ? 1 : c ? 1 : 0), after trying to merge b
             and c, we test that both share the same exit block, with the same
             value 1.  Whether or not that involves a forwarder block, if we
             don't go through the same (possibly absent) forwarder block in
             subsequent attempted combinations, e.g. a with c, we could find
             that a and inverted c share the same exit block with a different
             value, namely 0, which would enable an unsound merge.  We need all
             of inner, intervening and outer blocks to reach the same exit with
             the same value for the transformation to be sound.  So here we
             determine how to get to EXIT_BB from outer and inner with the same
             PHI values, record that in EXIT_PRED, and then subsequent
             combination attempts that have OUTER_COND_BB as an intervening
             block will ensure the same path to exit is taken, skipping unsound
             combinations.  */
          if (changed)
            /* EXIT_PRED was set along with CHANGED, and the successful
               combination already checked for the same PHI args.  */;
          else if (same_phi_args_p (outer_cond_bb, inner_cond_bb, exit_bb))
            exit_pred = inner_cond_bb;
          else if (then_bb == exit_bb
                   && forwarder_block_to (else_bb, then_bb)
                   && same_phi_args_p (outer_cond_bb, else_bb, exit_bb))
            exit_pred = else_bb;
          else if (else_bb == exit_bb
                   && forwarder_block_to (then_bb, else_bb)
                   && same_phi_args_p (outer_cond_bb, then_bb, exit_bb))
            exit_pred = then_bb;
          else
            /* If none of the paths share the same PHI args, no combination is
               viable.  */
            break;

          /* Skip the PHI args test below, it's redundant with the tests we've
             just done.  */
          continue;
        }

      /* Before trying an earlier block, make sure INNER_COND_BB and the
         current OUTER_COND_BB share the same PHI args at EXIT_BB.  We don't
         need to check if the latest attempt at combining succeeded, because
         that means we'll have already checked.  But we can't only check outer
         and inner, we have to check that all intervening blocks also get to
         exit with the same result, otherwise the transformation may change the
         final result.  Consider (a ? 0 : b ? 1 : c ? 0 : -1).  If we combine
         (a | c), yielding ((a | c) ? 0 : b ? 1 : [0 ? 0 :] -1), we'd get 0
         rather than 1 when (!a&&b).  And if we were to replace inner instead
         of outer, we'd get ([1 ? 0 :] b ? 1 : (a | c) ? 0 : -1), which would
         yield 1 rather than 0 when (a).  */
      if (!changed
          && !same_phi_args_p (outer_cond_bb, exit_pred, exit_bb))
        break;
    }

  return ret;
}
/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);
  mark_ssa_maybe_undefs ();

  /* Search every basic block for COND_EXPR we may be able to optimize.

     We walk the blocks in order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];

      if (safe_is_a <gcond *> (*gsi_last_bb (bb)))
        if (tree_ssa_ifcombine_bb (bb))
          cfg_changed |= true;
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}