/* Backward propagation of indirect loads through PHIs.
   Copyright (C) 2007-2024 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-ssa-loop.h"
#include "tree-cfg.h"
#include "tree-ssa-dce.h"
/* This pass propagates indirect loads through the PHI node for its
   address to make the load source possibly non-addressable and to
   allow for PHI optimization to trigger.

   For example the pass changes

     # addr_1 = PHI <&a, &b>
     tmp_1 = *addr_1;

   to

     # tmp_1 = PHI <a, b>

   but also handles more complex scenarios like

     D.2077_2 = &this_1(D)->a1;
     ...

     # b_12 = PHI <&c(2), D.2077_2(3)>
     D.2114_13 = *b_12;
     ...

     # b_15 = PHI <b_12(4), &b(5)>
     D.2080_5 = &this_1(D)->a0;
     ...

     # b_18 = PHI <D.2080_5(6), &c(7)>
     ...

     # b_21 = PHI <b_15(8), b_18(9)>
     D.2076_8 = *b_21;

   where the addresses loaded are themselves defined by PHIs.
   The above happens for

     std::max(std::min(a0, c), std::min(std::max(a1, c), b))

   where this pass transforms it to a form later PHI optimization
   recognizes and transforms it to the simple

     D.2109_10 = this_1(D)->a1;
     D.2110_11 = c;
     D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
     D.2115_14 = b;
     D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
     D.2119_16 = this_1(D)->a0;
     D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
     D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;

   The pass does a dominator walk processing loads using a basic-block
   local analysis and stores the result for use by transformations on
   dominated basic-blocks.  */
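
/* As a source-level sketch of the example above (a hypothetical
   reconstruction: the names a0, a1, b and c are taken from the GIMPLE
   dump, the surrounding class is invented for illustration):

     struct S
     {
       int a0, a1;
       int f (int b, int c)
       {
         return std::max (std::min (a0, c),
                          std::min (std::max (a1, c), b));
       }
     };

   The reference-returning std::min/std::max calls inline to PHIs of
   addresses, which this pass rewrites into PHIs of the loaded
   values.  */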
/* Structure to keep track of the value of a dereferenced PHI result
   and the virtual operand used for that dereference.  */

struct phiprop_d
{
  tree value;
  tree vuse;
};

/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB.  */

static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
  tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
  gimple *use_stmt;
  imm_use_iterator ui2;
  bool ok = true;

  /* The def stmts of the virtual uses need to be dominated by bb.  */
  gcc_assert (vuse != NULL_TREE);

  FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
    {
      /* If BB does not dominate a VDEF, the value is invalid.  */
      if ((gimple_vdef (use_stmt) != NULL_TREE
           || gimple_code (use_stmt) == GIMPLE_PHI)
          && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
        {
          ok = false;
          break;
        }
    }

  return ok;
}
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  */

static tree
phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
                    struct phiprop_d *phivn, size_t n,
                    bitmap dce_ssa_names)
{
  tree res;
  gphi *new_phi = NULL;
  edge_iterator ei;
  edge e;
  tree phi_result = PHI_RESULT (phi);
  bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (phi_result));

  gcc_assert (is_gimple_assign (use_stmt)
              && gimple_assign_rhs_code (use_stmt) == MEM_REF);

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = gimple_assign_lhs (use_stmt);
  if (TREE_CODE (res) == SSA_NAME)
    new_phi = create_phi_node (res, bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting PHI for result of load ");
      print_gimple_stmt (dump_file, use_stmt, 0);
    }

  gphi *vphi = get_virtual_phi (bb);

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var;
      gassign *tmp;
      location_t locus;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      locus = gimple_phi_arg_location_from_edge (phi, e);
      while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
        {
          gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
          old_arg = gimple_assign_rhs1 (def_stmt);
          locus = gimple_location (def_stmt);
        }

      if (TREE_CODE (old_arg) == SSA_NAME)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  for edge defining ");
              print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
              fprintf (dump_file, " reusing PHI result ");
              print_generic_expr (dump_file,
                                  phivn[SSA_NAME_VERSION (old_arg)].value);
              fprintf (dump_file, "\n");
            }
          /* Reuse a formerly created dereference.  */
          new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
        }
      else
        {
          tree rhs = gimple_assign_rhs1 (use_stmt);
          gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
          tree vuse = NULL_TREE;
          if (TREE_CODE (res) == SSA_NAME)
            {
              new_var = make_ssa_name (TREE_TYPE (rhs));
              if (vphi)
                vuse = PHI_ARG_DEF_FROM_EDGE (vphi, e);
              else
                vuse = gimple_vuse (use_stmt);
            }
          else
            /* For the aggregate copy case updating virtual operands
               we'd have to possibly insert a virtual PHI and we have
               to split the existing VUSE lifetime.  Leave that to
               the generic SSA updating.  */
            new_var = unshare_expr (res);
          if (!is_gimple_min_invariant (old_arg))
            old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
          else
            old_arg = unshare_expr (old_arg);
          tmp = gimple_build_assign (new_var,
                                     fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  old_arg,
                                                  TREE_OPERAND (rhs, 1)));
          gimple_set_location (tmp, locus);
          if (vuse)
            gimple_set_vuse (tmp, vuse);

          gsi_insert_on_edge (e, tmp);
          update_stmt (tmp);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  for edge defining ");
              print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
              fprintf (dump_file, " inserting load ");
              print_gimple_stmt (dump_file, tmp, 0);
            }
        }

      if (new_phi)
        add_phi_arg (new_phi, new_var, e, locus);
    }

  if (new_phi)
    {
      update_stmt (new_phi);

      if (dump_file && (dump_flags & TDF_DETAILS))
        print_gimple_stmt (dump_file, new_phi, 0);
    }

  return res;
}
/* Verify if *IDX is available at *DATA.  */

static bool
chk_uses (tree, tree *idx, void *data)
{
  basic_block dom = (basic_block) data;
  if (TREE_CODE (*idx) == SSA_NAME)
    return (SSA_NAME_IS_DEFAULT_DEF (*idx)
            || ! dominated_by_p (CDI_DOMINATORS,
                                 gimple_bb (SSA_NAME_DEF_STMT (*idx)), dom));
  return true;
}
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
        p_3 = p_2;
        z_2 = *p_3;
   and converts it to
        # z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N are used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
                    size_t n, bitmap dce_ssa_names)
{
  tree ptr = PHI_RESULT (phi);
  gimple *use_stmt;
  tree res = NULL_TREE;
  gimple_stmt_iterator gsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;
  bool changed;
  tree type = NULL_TREE;

  if (!POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr)))
          && TYPE_MODE (TREE_TYPE (TREE_TYPE (ptr))) == BLKmode))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
         created a value for or we reach a definition of the form
         ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
             && !SSA_NAME_IS_DEFAULT_DEF (arg)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
        {
          gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
          if (!gimple_assign_single_p (def_stmt))
            return false;
          arg = gimple_assign_rhs1 (def_stmt);
        }
      if (TREE_CODE (arg) != ADDR_EXPR
          && !(TREE_CODE (arg) == SSA_NAME
               && SSA_NAME_VERSION (arg) < n
               && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
               && (!type
                   || types_compatible_p
                        (type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value)))
               && phivn_valid_p (phivn, arg, bb)))
        return false;
      if (!type
          && TREE_CODE (arg) == SSA_NAME)
        type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
         && gimple_assign_ssa_name_copy_p (use_stmt))
    ptr = gimple_assign_lhs (use_stmt);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  changed = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      gimple *def_stmt;
      tree vuse;

      if (!dom_info_available_p (cfun, CDI_POST_DOMINATORS))
        calculate_dominance_info (CDI_POST_DOMINATORS);

      /* Only replace loads in blocks that post-dominate the PHI node.  That
         makes sure we don't end up speculating loads.  */
      if (!dominated_by_p (CDI_POST_DOMINATORS,
                           bb, gimple_bb (use_stmt)))
        continue;

      /* Check whether this is a load of *ptr.  */
      if (!(is_gimple_assign (use_stmt)
            && gimple_assign_rhs_code (use_stmt) == MEM_REF
            && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
            && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
            && (!type
                || types_compatible_p
                     (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
            /* We cannot replace a load that may throw or is volatile.
               For volatiles the transform can change the number of
               executions if the load is inside a loop but the address
               computations outside (PR91812).  We could relax this
               if we guard against that appropriately.  For loads that can
               throw we could relax things if the moved loads all are
               known to not throw.  */
            && !stmt_can_throw_internal (cfun, use_stmt)
            && !gimple_has_volatile_ops (use_stmt)))
        continue;

      /* Check if we can move the loads.  The def stmt of the virtual use
         needs to be in a different basic block dominating bb.  When the
         def is an edge-inserted one we know it dominates us.  */
      vuse = gimple_vuse (use_stmt);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
          && (gimple_bb (def_stmt) == bb
              || (gimple_bb (def_stmt)
                  && !dominated_by_p (CDI_DOMINATORS,
                                      bb, gimple_bb (def_stmt)))))
        goto next;

      /* Found a proper dereference with an aggregate copy.  Just
         insert aggregate copies on the edges instead.  */
      if (!is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (use_stmt))))
        {
          if (!gimple_vdef (use_stmt))
            goto next;

          /* As we replicate the lhs on each incoming edge all
             used SSA names have to be available there.  */
          if (! for_each_index (gimple_assign_lhs_ptr (use_stmt),
                                chk_uses,
                                get_immediate_dominator (CDI_DOMINATORS,
                                                         gimple_bb (phi))))
            goto next;

          gimple *vuse_stmt;
          imm_use_iterator vui;
          use_operand_p vuse_p;
          /* In order to move the aggregate copies earlier, make sure
             there are no statements that could read from memory
             aliasing the lhs in between the start of bb and use_stmt.
             As we require use_stmt to have a VDEF above, loads after
             use_stmt will use a different virtual SSA_NAME.  When
             we reach an edge inserted load the constraints we place
             on processing guarantee that program order is preserved
             so we can avoid checking those.  */
          FOR_EACH_IMM_USE_FAST (vuse_p, vui, vuse)
            {
              vuse_stmt = USE_STMT (vuse_p);
              if (vuse_stmt == use_stmt)
                continue;
              if (!gimple_bb (vuse_stmt)
                  || !dominated_by_p (CDI_DOMINATORS,
                                      gimple_bb (vuse_stmt), bb))
                continue;
              if (ref_maybe_used_by_stmt_p (vuse_stmt,
                                            gimple_assign_lhs (use_stmt)))
                goto next;
            }

          phiprop_insert_phi (bb, phi, use_stmt, phivn, n, dce_ssa_names);

          /* Remove the old stmt.  The phi, and possibly its dependencies,
             will be removed later via simple_dce_from_worklist.  */
          gsi = gsi_for_stmt (use_stmt);
          /* Unlinking the VDEF here is fine as we are sure that we process
             stmts in execution order due to aggregate copies having VDEFs
             and we emit loads on the edges in the very same order.
             We get multiple copies (or intermediate register loads) handled
             only by walking PHIs or immediate uses in a lucky order though,
             so we could signal the caller to re-start iterating over PHIs
             when we come here which would make it quadratic in the number
             of PHIs.  */
          unlink_stmt_vdef (use_stmt);
          gsi_remove (&gsi, true);

          changed = true;
        }

      /* Found a proper dereference.  Insert a phi node if this
         is the first load transformation.  */
      else if (!phi_inserted)
        {
          res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n,
                                    dce_ssa_names);
          type = TREE_TYPE (res);

          /* Remember the value we created for *ptr.  */
          phivn[SSA_NAME_VERSION (ptr)].value = res;
          phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;

          /* Remove the old stmt.  The phi, and possibly its dependencies,
             will be removed later via simple_dce_from_worklist.  */
          gsi = gsi_for_stmt (use_stmt);
          gsi_remove (&gsi, true);

          phi_inserted = true;
          changed = true;
        }
      else
        {
          /* Further replacements are easy, just make a copy out of the
             load.  */
          gimple_assign_set_rhs1 (use_stmt, res);
          update_stmt (use_stmt);
          changed = true;
        }

next:;
      /* Continue searching for a proper dereference.  */
    }

  return changed;
}
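
/* A source-level sketch of the simple pattern matched above
   (hypothetical names, for illustration only):

     int f (int cond, int x, int y)
     {
       int *p = cond ? &x : &y;
       return *p;
     }

   After the transform the load *p becomes a PHI of the values x and y,
   so x and y no longer need to be addressable and later PHI
   optimization can collapse the result further.  */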
/* Main entry for phiprop pass.  */

namespace {

const pass_data pass_data_phiprop =
{
  GIMPLE_PASS, /* type */
  "phiprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PHIPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_phiprop : public gimple_opt_pass
{
public:
  pass_phiprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phiprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_phiprop (m_ctxt); }
  bool gate (function *) final override { return flag_tree_phiprop; }
  unsigned int execute (function *) final override;

}; // class pass_phiprop

unsigned int
pass_phiprop::execute (function *fun)
{
  struct phiprop_d *phivn;
  bool did_something = false;
  basic_block bb;
  gphi_iterator gsi;
  unsigned i;
  size_t n;
  auto_bitmap dce_ssa_names;

  calculate_dominance_info (CDI_DOMINATORS);

  n = num_ssa_names;
  phivn = XCNEWVEC (struct phiprop_d, n);

  /* Walk the dominator tree in preorder.  */
  auto_vec<basic_block> bbs
    = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* Since we're going to move dereferences across predecessor
         edges avoid blocks with abnormal predecessors.  */
      if (bb_has_abnormal_pred (bb))
        continue;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        did_something |= propagate_with_phi (bb, gsi.phi (), phivn, n,
                                             dce_ssa_names);
    }

  if (did_something)
    {
      gsi_commit_edge_inserts ();
      simple_dce_from_worklist (dce_ssa_names);
    }

  free (phivn);

  free_dominance_info (CDI_POST_DOMINATORS);

  return did_something ? TODO_update_ssa_only_virtuals : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_phiprop (gcc::context *ctxt)
{
  return new pass_phiprop (ctxt);
}
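
/* Usage sketch: the pass is gated on flag_tree_phiprop
   (-ftree-phiprop, enabled by default at -O1 and higher).  Its
   effect can be inspected via the pass dump, e.g.

     gcc -O1 -fdump-tree-phiprop-details t.c

   which writes a dump file named after the pass ("phiprop") showing
   the inserted PHIs and edge loads.  */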