/* Preamble and helpers for the autogenerated gimple-match.cc file.
   Copyright (C) 2014-2025 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
39 #include "gimple-match.h"
40 #include "tree-pass.h"
41 #include "internal-fn.h"
42 #include "case-cfn-macros.h"
46 #include "optabs-tree.h"
50 #include "gimple-range.h"
51 #include "langhooks.h"
55 tree
do_valueize (tree
, tree (*)(tree
), bool &);
56 tree
do_valueize (tree (*)(tree
), tree
);
58 /* Helper for the autogenerated code, get at the definition of NAME when
59 VALUEIZE allows that. */
62 get_def (tree (*valueize
)(tree
), tree name
)
64 if (valueize
&& ! valueize (name
))
66 return SSA_NAME_DEF_STMT (name
);
69 /* Routine to determine if the types T1 and T2 are effectively
70 the same for GIMPLE. If T1 or T2 is not a type, the test
71 applies to their TREE_TYPE. */
74 types_match (tree t1
, tree t2
)
81 return types_compatible_p (t1
, t2
);
84 /* Routine to determine if the types T1, T2 and T3 are effectively
85 the same for GIMPLE. If T1, T2 or T2 is not a type, the test
86 applies to their TREE_TYPE. */
89 types_match (tree t1
, tree t2
, tree t3
)
91 return types_match (t1
, t2
) && types_match (t2
, t3
);
94 /* Return if T has a single use. For GIMPLE, we also allow any
95 non-SSA_NAME (ie constants) and zero uses to cope with uses
96 that aren't linked up yet. */
99 single_use (const_tree
) ATTRIBUTE_PURE
;
102 single_use (const_tree t
)
104 if (TREE_CODE (t
) != SSA_NAME
)
107 /* Inline return has_zero_uses (t) || has_single_use (t); */
108 const ssa_use_operand_t
*const head
= &(SSA_NAME_IMM_USE_NODE (t
));
109 const ssa_use_operand_t
*ptr
;
112 for (ptr
= head
->next
; ptr
!= head
; ptr
= ptr
->next
)
113 if (USE_STMT(ptr
) && !is_gimple_debug (USE_STMT (ptr
)))
122 /* Return true if math operations should be canonicalized,
123 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
126 canonicalize_math_p ()
128 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_opt_math
) == 0;
131 /* Return true if math operations that are beneficial only after
132 vectorization should be canonicalized. */
135 canonicalize_math_after_vectorization_p ()
137 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) != 0;
140 /* Return true if we can still perform transformations that may introduce
141 vector operations that are not supported by the target. Vector lowering
142 normally handles those, but after that pass, it becomes unsafe. */
145 optimize_vectors_before_lowering_p ()
147 return !cfun
|| (cfun
->curr_properties
& PROP_gimple_lvec
) == 0;
150 /* Returns true if the expression T has no side effects
151 including not trapping. */
153 expr_no_side_effects_p (tree t
)
155 /* For gimple, there should only be gimple val's here. */
156 gcc_assert (is_gimple_val (t
));
160 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
161 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
162 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
163 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
164 will likely be exact, while exp (log (arg0) * arg1) might be not.
165 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
168 optimize_pow_to_exp (tree arg0
, tree arg1
)
170 gcc_assert (TREE_CODE (arg0
) == REAL_CST
);
171 if (!real_isinteger (TREE_REAL_CST_PTR (arg0
), TYPE_MODE (TREE_TYPE (arg0
))))
174 if (TREE_CODE (arg1
) != SSA_NAME
)
177 gimple
*def
= SSA_NAME_DEF_STMT (arg1
);
178 gphi
*phi
= dyn_cast
<gphi
*> (def
);
179 tree cst1
= NULL_TREE
;
180 enum tree_code code
= ERROR_MARK
;
183 if (!is_gimple_assign (def
))
185 code
= gimple_assign_rhs_code (def
);
194 if (TREE_CODE (gimple_assign_rhs1 (def
)) != SSA_NAME
195 || TREE_CODE (gimple_assign_rhs2 (def
)) != REAL_CST
)
198 cst1
= gimple_assign_rhs2 (def
);
200 phi
= dyn_cast
<gphi
*> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def
)));
205 tree cst2
= NULL_TREE
;
206 int n
= gimple_phi_num_args (phi
);
207 for (int i
= 0; i
< n
; i
++)
209 tree arg
= PHI_ARG_DEF (phi
, i
);
210 if (TREE_CODE (arg
) != REAL_CST
)
212 else if (cst2
== NULL_TREE
)
214 else if (!operand_equal_p (cst2
, arg
, 0))
219 cst2
= const_binop (code
, TREE_TYPE (cst2
), cst2
, cst1
);
221 && TREE_CODE (cst2
) == REAL_CST
222 && real_isinteger (TREE_REAL_CST_PTR (cst2
),
223 TYPE_MODE (TREE_TYPE (cst2
))))
228 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
229 is another division can be optimized. Don't optimize if INNER_DIV
230 is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
233 optimize_successive_divisions_p (tree divisor
, tree inner_div
)
235 if (!gimple_in_ssa_p (cfun
))
238 imm_use_iterator imm_iter
;
240 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, inner_div
)
242 gimple
*use_stmt
= USE_STMT (use_p
);
243 if (!is_gimple_assign (use_stmt
)
244 || gimple_assign_rhs_code (use_stmt
) != TRUNC_MOD_EXPR
245 || !operand_equal_p (gimple_assign_rhs2 (use_stmt
), divisor
, 0))
252 /* Return true if EXPR1 and EXPR2 have the same value, but not necessarily
253 same type. The types can differ through nop conversions. */
254 #define bitwise_equal_p(expr1, expr2) \
255 gimple_bitwise_equal_p (expr1, expr2, valueize)
257 bool gimple_nop_convert (tree
, tree
*, tree (*) (tree
));
258 bool gimple_maybe_truncate (tree
, tree
*, tree (*) (tree
));
260 /* Helper function for bitwise_equal_p macro. */
263 gimple_bitwise_equal_p (tree expr1
, tree expr2
, tree (*valueize
) (tree
))
267 if (!tree_nop_conversion_p (TREE_TYPE (expr1
), TREE_TYPE (expr2
)))
269 if (TREE_CODE (expr1
) == INTEGER_CST
&& TREE_CODE (expr2
) == INTEGER_CST
)
270 return wi::to_wide (expr1
) == wi::to_wide (expr2
);
271 if (operand_equal_p (expr1
, expr2
, 0))
274 if (!gimple_nop_convert (expr1
, &expr3
, valueize
))
276 if (!gimple_nop_convert (expr2
, &expr4
, valueize
))
280 if (operand_equal_p (expr3
, expr2
, 0))
282 if (expr2
!= expr4
&& operand_equal_p (expr3
, expr4
, 0))
285 if (expr2
!= expr4
&& operand_equal_p (expr1
, expr4
, 0))
287 if (gimple_maybe_truncate (expr3
, &expr3
, valueize
)
288 && gimple_maybe_truncate (expr4
, &expr4
, valueize
)
289 && operand_equal_p (expr3
, expr4
, 0))
294 /* Return true if EXPR1 and EXPR2 have the bitwise opposite value,
295 but not necessarily same type.
296 The types can differ through nop conversions. */
297 #define bitwise_inverted_equal_p(expr1, expr2, wascmp) \
298 gimple_bitwise_inverted_equal_p (expr1, expr2, wascmp, valueize)
301 bool gimple_bit_not_with_nop (tree
, tree
*, tree (*) (tree
));
302 bool gimple_maybe_cmp (tree
, tree
*, tree (*) (tree
));
303 bool gimple_bit_xor_cst (tree
, tree
*, tree (*) (tree
));
305 /* Helper function for bitwise_inverted_equal_p macro. */
308 gimple_bitwise_inverted_equal_p (tree expr1
, tree expr2
, bool &wascmp
, tree (*valueize
) (tree
))
313 if (!tree_nop_conversion_p (TREE_TYPE (expr1
), TREE_TYPE (expr2
)))
315 tree cst1
= uniform_integer_cst_p (expr1
);
316 tree cst2
= uniform_integer_cst_p (expr2
);
318 return wi::to_wide (cst1
) == ~wi::to_wide (cst2
);
319 if (operand_equal_p (expr1
, expr2
, 0))
324 /* `X ^ CST` and `X ^ ~CST` match for ~. */
325 if (gimple_bit_xor_cst (expr1
, xor1
, valueize
)
326 && gimple_bit_xor_cst (expr2
, xor2
, valueize
))
328 if (operand_equal_p (xor1
[0], xor2
[0], 0)
329 && (wi::to_wide (uniform_integer_cst_p (xor1
[1]))
330 == ~wi::to_wide (uniform_integer_cst_p (xor2
[1]))))
335 /* Try if EXPR1 was defined as ~EXPR2. */
336 if (gimple_bit_not_with_nop (expr1
, &other
, valueize
))
338 if (gimple_bitwise_equal_p (other
, expr2
, valueize
))
341 /* Try if EXPR2 was defined as ~EXPR1. */
342 if (gimple_bit_not_with_nop (expr2
, &other
, valueize
))
344 if (gimple_bitwise_equal_p (other
, expr1
, valueize
))
348 /* If neither are defined by BIT_NOT, try to see if
349 both are defined by comparisons and see if they are
350 complementary (inversion) of each other. */
351 tree newexpr1
, newexpr2
;
352 if (!gimple_maybe_cmp (expr1
, &newexpr1
, valueize
))
354 if (!gimple_maybe_cmp (expr2
, &newexpr2
, valueize
))
357 gimple
*d1
= get_def (valueize
, newexpr1
);
358 gassign
*a1
= dyn_cast
<gassign
*> (d1
);
359 gimple
*d2
= get_def (valueize
, newexpr2
);
360 gassign
*a2
= dyn_cast
<gassign
*> (d2
);
361 tree op10
= do_valueize (valueize
, gimple_assign_rhs1 (a1
));
362 tree op20
= do_valueize (valueize
, gimple_assign_rhs1 (a2
));
363 if (!operand_equal_p (op10
, op20
))
365 tree op11
= do_valueize (valueize
, gimple_assign_rhs2 (a1
));
366 tree op21
= do_valueize (valueize
, gimple_assign_rhs2 (a2
));
367 if (!operand_equal_p (op11
, op21
))
370 tree_code ac1
= gimple_assign_rhs_code (a1
);
371 tree_code ac2
= gimple_assign_rhs_code (a2
);
372 /* Match `^` against `==` but this should only
373 happen when the type is a 1bit precision integer. */
374 if (ac1
== BIT_XOR_EXPR
)
376 tree type
= TREE_TYPE (newexpr1
);
377 gcc_assert (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) == 1);
378 return ac2
== EQ_EXPR
;
380 if (ac2
== BIT_XOR_EXPR
)
382 tree type
= TREE_TYPE (newexpr1
);
383 gcc_assert (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) == 1);
384 return ac1
== EQ_EXPR
;
386 if (invert_tree_comparison (ac1
, HONOR_NANS (op10
)) == ac2
)
/* Return the relevant gcond * of the given phi, as well as the true
 * and false TREE args of the phi.  Or return nullptr.
 *
 * If matched the gcond *, the output argument TREE true_arg and false_arg
 * will be updated to the relevant args of phi.
 *
 * If failed to match, nullptr gcond * will be returned, as well as the output
 * arguments will be set to NULL_TREE.  */
402 static inline gcond
*
403 match_cond_with_binary_phi (gphi
*phi
, tree
*true_arg
, tree
*false_arg
)
405 *true_arg
= *false_arg
= NULL_TREE
;
407 if (gimple_phi_num_args (phi
) != 2)
410 basic_block pred_b0
= EDGE_PRED (gimple_bb (phi
), 0)->src
;
411 basic_block pred_b1
= EDGE_PRED (gimple_bb (phi
), 1)->src
;
412 edge edge_for_pred_0
= nullptr;
414 if (EDGE_COUNT (pred_b0
->succs
) == 2
415 && EDGE_COUNT (pred_b1
->succs
) == 1
416 && EDGE_COUNT (pred_b1
->preds
) == 1
417 && pred_b0
== EDGE_PRED (pred_b1
, 0)->src
)
423 * | cond |------>| def |
431 * | def |<----------+
433 * #: edge_for_pred_0.
435 edge_for_pred_0
= EDGE_PRED (gimple_bb (phi
), 0);
436 else if (EDGE_COUNT (pred_b1
->succs
) == 2
437 && EDGE_COUNT (pred_b0
->succs
) == 1
438 && EDGE_COUNT (pred_b0
->preds
) == 1
439 && pred_b1
== EDGE_PRED (pred_b0
, 0)->src
)
445 * | def |<---#---| cond |
454 * | def |<----------+
456 * #: edge_for_pred_0.
458 edge_for_pred_0
= EDGE_PRED (pred_b0
, 0);
459 else if (EDGE_COUNT (pred_b0
->succs
) == 1
460 && EDGE_COUNT (pred_b1
->succs
) == 1
461 && EDGE_COUNT (pred_b0
->preds
) == 1
462 && EDGE_COUNT (pred_b1
->preds
) == 1
463 && EDGE_COUNT (EDGE_PRED (pred_b0
, 0)->src
->succs
) == 2
464 && EDGE_PRED (pred_b0
, 0)->src
== EDGE_PRED (pred_b1
, 0)->src
)
468 * | cond |------>| b2: |
482 * | b3: |<----------+
485 * #: edge_for_pred_0.
487 edge_for_pred_0
= EDGE_PRED (pred_b0
, 0);
489 if (!edge_for_pred_0
)
492 gcond
*cond
= safe_dyn_cast
<gcond
*> (*gsi_last_bb (edge_for_pred_0
->src
));
497 if (edge_for_pred_0
->flags
& EDGE_TRUE_VALUE
)
499 *true_arg
= gimple_phi_arg_def (phi
, 0);
500 *false_arg
= gimple_phi_arg_def (phi
, 1);
502 else /* Aka edge_for_pred_0->flags & EDGE_FALSE_VALUE */
504 *false_arg
= gimple_phi_arg_def (phi
, 0);
505 *true_arg
= gimple_phi_arg_def (phi
, 1);