AVR: Provide built-ins for strlen where the string lives in some AS.
[gcc.git] / gcc / ipa.cc
blob8a7b067a526a7fd716c406fc9428130488f358e8
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2025 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "stringpool.h"
30 #include "cgraph.h"
31 #include "gimplify.h"
32 #include "tree-iterator.h"
33 #include "ipa-utils.h"
34 #include "symbol-summary.h"
35 #include "tree-vrp.h"
36 #include "sreal.h"
37 #include "ipa-cp.h"
38 #include "ipa-prop.h"
39 #include "ipa-fnsummary.h"
40 #include "dbgcnt.h"
41 #include "debug.h"
42 #include "stringpool.h"
43 #include "attribs.h"
45 /* Return true when NODE has ADDR reference. */
47 static bool
48 has_addr_references_p (struct cgraph_node *node,
49 void *)
51 int i;
52 struct ipa_ref *ref = NULL;
54 for (i = 0; node->iterate_referring (i, ref); i++)
55 if (ref->use == IPA_REF_ADDR)
56 return true;
57 return false;
60 /* Return true when NODE can be target of an indirect call. */
62 static bool
63 is_indirect_call_target_p (struct cgraph_node *node, void *)
65 return node->indirect_call_target;
68 /* Look for all functions inlined to NODE and update their inlined_to pointers
69 to INLINED_TO. */
71 static void
72 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
74 struct cgraph_edge *e;
75 for (e = node->callees; e; e = e->next_callee)
76 if (e->callee->inlined_to)
78 e->callee->inlined_to = inlined_to;
79 update_inlined_to_pointer (e->callee, inlined_to);
83 /* Add symtab NODE to queue starting at FIRST.
85 The queue is linked via AUX pointers and terminated by pointer to 1.
86 We enqueue nodes at two occasions: when we find them reachable or when we find
87 their bodies needed for further clonning. In the second case we mark them
88 by pointer to 2 after processing so they are re-queue when they become
89 reachable. */
91 static void
92 enqueue_node (symtab_node *node, symtab_node **first,
93 hash_set<symtab_node *> *reachable)
95 /* Node is still in queue; do nothing. */
96 if (node->aux && node->aux != (void *) 2)
97 return;
98 /* Node was already processed as unreachable, re-enqueue
99 only if it became reachable now. */
100 if (node->aux == (void *)2 && !reachable->contains (node))
101 return;
102 node->aux = *first;
103 *first = node;
106 /* Return true if NODE may get inlined later.
107 This is used to keep DECL_EXTERNAL function bodies around long enough
108 so inliner can proces them. */
110 static bool
111 possible_inline_candidate_p (symtab_node *node)
113 if (symtab->state >= IPA_SSA_AFTER_INLINING)
114 return false;
115 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
116 if (!cnode)
117 return false;
118 if (DECL_UNINLINABLE (cnode->decl))
119 return false;
120 if (opt_for_fn (cnode->decl, optimize))
121 return true;
122 if (symtab->state >= IPA_SSA)
123 return false;
124 return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
/* Process all references of SNODE: add referred symbols that must be kept
   to REACHABLE and enqueue every referred symbol on the queue headed by
   *FIRST so it is at least kept in the boundary.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      /* A referred definition is fully reachable when it is non-external
	 (or an alias), or when it may still be inlined, or when its
	 constructor may be useful for constant folding at LTO time.  */
      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (possible_inline_candidate_p (node)
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && (flag_wpa
			  || flag_incremental_link
			     == INCREMENTAL_LINK_LTO)
		      && dyn_cast <varpool_node *> (node)
			   ->ctor_useable_for_folding_p ()))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && symtab->state < IPA_SSA_AFTER_INLINING)
	    reachable->add (body);
	  reachable->add (node);
	}
      /* Enqueue unconditionally: even symbols not added to REACHABLE are
	 processed as boundary nodes.  */
      enqueue_node (node, first, reachable);
    }
}
/* EDGE is a polymorphic call.  Mark all its potential targets as reachable
   so that later inlining is possible if devirtualization happens; after
   inlining, still keep their declarations around so we can devirtualize to
   a direct call.

   Also try trivial devirtualization when no target or only one target is
   possible.  REACHABLE_CALL_TARGETS caches the target sets already walked
   (keyed by CACHE_TOKEN) so each set is processed only once.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* add returns true if the token was already present; walk the target
     list only the first time we see this token.  */
  if (cache_token != NULL && !reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (possible_inline_candidate_p (body)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && symtab->state < IPA_SSA_AFTER_INLINING)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No target at all: the call is unreachable; redirect to the
	       unreachable builtin.  */
	    target = cgraph_node::get_create (builtin_decl_unreachable ());

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = cgraph_edge::make_direct (edge, target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node->inlined_to
					   ? node->inlined_to : node);
	  else if (edge->call_stmt)
	    cgraph_edge::redirect_call_stmt_to_callee (edge);
	}
    }
}
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions needs special handling; the bodies needs
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones needs bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain set of function declaration where body needs to stay in
     body_needed_for_clonning

     Inline clones represent special case: their declaration match the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that needs
   to stay) and symbols that are in boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we always
   turn definition into a declaration, but we may keep function body around
   based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      /* All AUX pointers must start out clear; they are our queue links.  */
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *)2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  /* Abstract origins of reachable functions must be kept so debug
	     info stays consistent; propagate the flag to matching clones.  */
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any non-external and non-local symbol in a comdat group is
	     reachable, force all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group
	      && node->externally_visible
	      && !DECL_EXTERNAL (node->decl))
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !DECL_EXTERNAL (next->decl)
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || possible_inline_candidate_p (e->callee)))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && symtab->state < IPA_SSA_AFTER_INLINING)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl))
	    {
	      /* Make the node a non-clone so that we do not attempt to
		 materialize it later.  */
	      if (node->clone_of)
		node->remove_from_clone_tree ();
	      node->release_body ();
	    }
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      /* Turn the boundary definition into a mere declaration.  */
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
	  /* Removing the aliases may have removed VNODE's successor;
	     recompute it.  */
	  vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->dump_name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	      || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	      (has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->dump_name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (symtab->state >= IPA_SSA_AFTER_INLINING
		    || !node->call_for_symbol_and_aliases
			  (is_indirect_call_target_p, NULL, true)))
	      {
		node->local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.

   The iteration stops early once EXPLICIT_REFS is cleared or all three
   flags have already been set; aliases recurse into their referring
   variable.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  /* Volatile variables and those with non-explicit references cannot be
     analyzed this way.  */
  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      }
}
724 /* Set TREE_READONLY bit. */
726 bool
727 set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
729 TREE_READONLY (vnode->decl) = true;
730 return false;
733 /* Set writeonly bit and clear the initalizer, since it will not be needed. */
735 bool
736 set_writeonly_bit (varpool_node *vnode, void *data)
738 vnode->writeonly = true;
739 if (optimize || in_lto_p)
741 DECL_INITIAL (vnode->decl) = NULL;
742 if (!vnode->alias)
744 if (vnode->num_references ())
745 *(bool *)data = true;
746 vnode->remove_all_references ();
749 return false;
752 /* Clear addressale bit of VNODE. */
754 bool
755 clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
757 vnode->address_taken = false;
758 TREE_ADDRESSABLE (vnode->decl) = 0;
759 return false;
/* Discover variables that have no longer address taken, are read-only or
   write-only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_variable_flags (void)
{
  if (!flag_ipa_reference_addressable)
    return false;

  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    /* Only look at variables whose flags could still be improved.  */
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	/* If some references are implicit we cannot trust the analysis.  */
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)",
		       vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   OPTIMIZATION and TARGET are the option nodes installed on the new
   function decl.  FINAL specify whether the externally visible name for
   collect2 should be produced.  Returns the new FUNCTION_DECL.  */

static tree
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
			     tree optimization,
			     tree target)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (!targetm.have_ctors_dtors && final)
    {
      sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
      name = get_file_function_name (which_buf);
    }
  else
    {
      /* Produce sane name but one not recognizable by collect2, just for the
	 case we fail to inline the function.  */
      sprintf (which_buf, "_sub_%c_%.5d_%d", which, priority, counter++);
      name = get_identifier (which_buf);
    }

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
  DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      /* Make the symbol visible to collect2 and keep it from being
	 removed as unreferenced.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
  return decl;
}
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   Convenience wrapper around cgraph_build_static_cdtor_1 using the
   default optimization and target option nodes and no collect2-visible
   name.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FIXME: We should be able to
     gcc_assert (!in_lto_p);
     because at LTO time the global options are not safe to use.
     Unfortunately ASAN finish_file will produce constructors late and they
     may lead to surprises.  */
  cgraph_build_static_cdtor_1 (which, body, priority, false,
			       optimization_default_node,
			       target_option_default_node);
}
939 /* When target does not have ctors and dtors, we call all constructor
940 and destructor by special initialization/destruction function
941 recognized by collect2.
943 When we are going to build this function, collect all constructors and
944 destructors and turn them into normal functions. */
946 static void
947 record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
949 if (DECL_STATIC_CONSTRUCTOR (node->decl))
950 ctors->safe_push (node->decl);
951 if (DECL_STATIC_DESTRUCTOR (node->decl))
952 dtors->safe_push (node->decl);
953 node = cgraph_node::get (node->decl);
954 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Scan forward to find the end of the run [i, j) of cdtors that
	 share the same priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
				   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
				   DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
    }
}
/* Helper functions for build_cxa_dtor_registrations ().
   Build a decl for __cxa_atexit ().  */

static tree
build_cxa_atexit_decl ()
{
  /* The parameter to "__cxa_atexit" is "void (*)(void *)".  */
  tree fn_type = build_function_type_list (void_type_node,
					   ptr_type_node, NULL_TREE);
  tree fn_ptr_type = build_pointer_type (fn_type);
  /* The declaration for `__cxa_atexit' is:
     int __cxa_atexit (void (*)(void *), void *, void *).  */
  const char *name = "__cxa_atexit";
  tree cxa_name = get_identifier (name);
  fn_type = build_function_type_list (integer_type_node, fn_ptr_type,
				      ptr_type_node, ptr_type_node, NULL_TREE);
  tree atexit_fndecl = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
				   cxa_name, fn_type);
  /* The runtime provides this symbol; mark it external with default
     visibility and leaf/nothrow call semantics.  */
  SET_DECL_ASSEMBLER_NAME (atexit_fndecl, cxa_name);
  DECL_VISIBILITY (atexit_fndecl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (atexit_fndecl) = true;
  set_call_expr_flags (atexit_fndecl, ECF_LEAF | ECF_NOTHROW);
  TREE_PUBLIC (atexit_fndecl) = true;
  DECL_EXTERNAL (atexit_fndecl) = true;
  DECL_ARTIFICIAL (atexit_fndecl) = true;
  return atexit_fndecl;
}
1052 /* Build a decl for __dso_handle. */
1054 static tree
1055 build_dso_handle_decl ()
1057 /* Declare the __dso_handle variable. */
1058 tree dso_handle_decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1059 get_identifier ("__dso_handle"),
1060 ptr_type_node);
1061 TREE_PUBLIC (dso_handle_decl) = true;
1062 DECL_EXTERNAL (dso_handle_decl) = true;
1063 DECL_ARTIFICIAL (dso_handle_decl) = true;
1064 #ifdef HAVE_GAS_HIDDEN
1065 if (dso_handle_decl != error_mark_node)
1067 DECL_VISIBILITY (dso_handle_decl) = VISIBILITY_HIDDEN;
1068 DECL_VISIBILITY_SPECIFIED (dso_handle_decl) = true;
1070 #endif
1071 return dso_handle_decl;
1074 /* This builds one or more constructor functions that register DTORs with
1075 __cxa_atexit (). Within a priority level, DTORs are registered in TU
1076 order - which means that they will run in reverse TU order from cxa_atexit.
1077 This is the same behavior as using a .fini / .mod_term_funcs section.
1078 As the functions are built, they are appended to the CTORs vector. */
1080 static void
1081 build_cxa_dtor_registrations (const vec<tree> &dtors, vec<tree> *ctors)
1083 size_t i,j;
1084 size_t len = dtors.length ();
1086 location_t sav_loc = input_location;
1087 input_location = UNKNOWN_LOCATION;
1089 tree atexit_fndecl = build_cxa_atexit_decl ();
1090 tree dso_handle_decl = build_dso_handle_decl ();
1092 /* We want &__dso_handle. */
1093 tree dso_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
1094 ptr_type_node, dso_handle_decl);
1096 i = 0;
1097 while (i < len)
1099 priority_type priority = 0;
1100 tree body = NULL_TREE;
1101 j = i;
1104 priority_type p;
1105 tree fn = dtors[j];
1106 p = DECL_FINI_PRIORITY (fn);
1107 if (j == i)
1108 priority = p;
1109 else if (p != priority)
1110 break;
1111 j++;
1113 while (j < len);
1115 /* Find the next batch of destructors with the same initialization
1116 priority. */
1117 for (;i < j; i++)
1119 tree fn = dtors[i];
1120 DECL_STATIC_DESTRUCTOR (fn) = 0;
1121 tree dtor_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
1122 ptr_type_node, fn);
1123 tree call_cxa_atexit
1124 = build_call_expr_loc (UNKNOWN_LOCATION, atexit_fndecl, 3,
1125 dtor_ptr, null_pointer_node, dso_ptr);
1126 TREE_SIDE_EFFECTS (call_cxa_atexit) = 1;
1127 append_to_statement_list (call_cxa_atexit, &body);
1130 gcc_assert (body != NULL_TREE);
1131 /* Generate a function to register the DTORs at this priority. */
1132 tree new_ctor
1133 = cgraph_build_static_cdtor_1 ('I', body, priority, true,
1134 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (dtors[0]),
1135 DECL_FUNCTION_SPECIFIC_TARGET (dtors[0]));
1136 /* Add this to the list of ctors. */
1137 ctors->safe_push (new_ctor);
1139 input_location = sav_loc;
1142 /* Comparison function for qsort. P1 and P2 are actually of type
1143 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1144 used to determine the sort order. */
1146 static int
1147 compare_ctor (const void *p1, const void *p2)
1149 tree f1;
1150 tree f2;
1151 int priority1;
1152 int priority2;
1154 f1 = *(const tree *)p1;
1155 f2 = *(const tree *)p2;
1156 priority1 = DECL_INIT_PRIORITY (f1);
1157 priority2 = DECL_INIT_PRIORITY (f2);
1159 if (priority1 < priority2)
1160 return -1;
1161 else if (priority1 > priority2)
1162 return 1;
1163 else
1164 /* Ensure a stable sort. Constructors are executed in backwarding
1165 order to make LTO initialize braries first. */
1166 return DECL_UID (f2) - DECL_UID (f1);
1169 /* Comparison function for qsort. P1 and P2 are actually of type
1170 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1171 used to determine the sort order. */
1173 static int
1174 compare_dtor (const void *p1, const void *p2)
1176 tree f1;
1177 tree f2;
1178 int priority1;
1179 int priority2;
1181 f1 = *(const tree *)p1;
1182 f2 = *(const tree *)p2;
1183 priority1 = DECL_FINI_PRIORITY (f1);
1184 priority2 = DECL_FINI_PRIORITY (f2);
1186 if (priority1 < priority2)
1187 return -1;
1188 else if (priority1 > priority2)
1189 return 1;
1190 else
1191 /* Ensure a stable sort - into TU order. */
1192 return DECL_UID (f1) - DECL_UID (f2);
1195 /* Comparison function for qsort. P1 and P2 are of type "tree *" and point to
1196 a pair of static constructors or destructors. We first sort on the basis of
1197 priority and then into TU order (on the strict assumption that DECL_UIDs are
1198 ordered in the same way as the original functions). ???: this seems quite
1199 fragile. */
1201 static int
1202 compare_cdtor_tu_order (const void *p1, const void *p2)
1204 tree f1;
1205 tree f2;
1206 int priority1;
1207 int priority2;
1209 f1 = *(const tree *)p1;
1210 f2 = *(const tree *)p2;
1211 /* We process the DTORs first, and then remove their flag, so this order
1212 allows for functions that are declared as both CTOR and DTOR. */
1213 if (DECL_STATIC_DESTRUCTOR (f1))
1215 gcc_checking_assert (DECL_STATIC_DESTRUCTOR (f2));
1216 priority1 = DECL_FINI_PRIORITY (f1);
1217 priority2 = DECL_FINI_PRIORITY (f2);
1219 else
1221 priority1 = DECL_INIT_PRIORITY (f1);
1222 priority2 = DECL_INIT_PRIORITY (f2);
1225 if (priority1 < priority2)
1226 return -1;
1227 else if (priority1 > priority2)
1228 return 1;
1229 else
1230 /* For equal priority, sort into the order of definition in the TU. */
1231 return DECL_UID (f1) - DECL_UID (f2);
1234 /* Generate functions to call static constructors and destructors
1235 for targets that do not support .ctors/.dtors sections. These
1236 functions have magic names which are detected by collect2. */
1238 static void
1239 build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
1241 if (!ctors->is_empty ())
1243 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1244 ctors->qsort (compare_ctor);
1245 build_cdtor (/*ctor_p=*/true, *ctors);
1248 if (!dtors->is_empty ())
1250 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1251 dtors->qsort (compare_dtor);
1252 build_cdtor (/*ctor_p=*/false, *dtors);
1256 /* Generate new CTORs to register static destructors with __cxa_atexit and add
1257 them to the existing list of CTORs; we then process the revised CTORs list.
1259 We sort the DTORs into priority and then TU order, this means that they are
1260 registered in that order with __cxa_atexit () and therefore will be run in
1261 the reverse order.
1263 Likewise, CTORs are sorted into priority and then TU order, which means that
1264 they will run in that order.
1266 This matches the behavior of using init/fini or mod_init_func/mod_term_func
1267 sections. */
1269 static void
1270 build_cxa_atexit_fns (vec<tree> *ctors, vec<tree> *dtors)
1272 if (!dtors->is_empty ())
1274 gcc_assert (targetm.dtors_from_cxa_atexit);
1275 dtors->qsort (compare_cdtor_tu_order);
1276 build_cxa_dtor_registrations (*dtors, ctors);
1279 if (!ctors->is_empty ())
1281 gcc_assert (targetm.dtors_from_cxa_atexit);
1282 ctors->qsort (compare_cdtor_tu_order);
1283 build_cdtor (/*ctor_p=*/true, *ctors);
1287 /* Look for constructors and destructors and produce function calling them.
1288 This is needed for targets not supporting ctors or dtors, but we perform the
1289 transformation also at linktime to merge possibly numerous
1290 constructors/destructors into single function to improve code locality and
1291 reduce size. */
1293 static unsigned int
1294 ipa_cdtor_merge (void)
1296 /* A vector of FUNCTION_DECLs declared as static constructors. */
1297 auto_vec<tree, 20> ctors;
1298 /* A vector of FUNCTION_DECLs declared as static destructors. */
1299 auto_vec<tree, 20> dtors;
1300 struct cgraph_node *node;
1301 FOR_EACH_DEFINED_FUNCTION (node)
1302 if (DECL_STATIC_CONSTRUCTOR (node->decl)
1303 || DECL_STATIC_DESTRUCTOR (node->decl))
1304 record_cdtor_fn (node, &ctors, &dtors);
1305 if (targetm.dtors_from_cxa_atexit)
1306 build_cxa_atexit_fns (&ctors, &dtors);
1307 else
1308 build_cdtor_fns (&ctors, &dtors);
1309 return 0;
1312 namespace {
1314 const pass_data pass_data_ipa_cdtor_merge =
1316 IPA_PASS, /* type */
1317 "cdtor", /* name */
1318 OPTGROUP_NONE, /* optinfo_flags */
1319 TV_CGRAPHOPT, /* tv_id */
1320 0, /* properties_required */
1321 0, /* properties_provided */
1322 0, /* properties_destroyed */
1323 0, /* todo_flags_start */
1324 0, /* todo_flags_finish */
1327 class pass_ipa_cdtor_merge : public ipa_opt_pass_d
1329 public:
1330 pass_ipa_cdtor_merge (gcc::context *ctxt)
1331 : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
1332 NULL, /* generate_summary */
1333 NULL, /* write_summary */
1334 NULL, /* read_summary */
1335 NULL, /* write_optimization_summary */
1336 NULL, /* read_optimization_summary */
1337 NULL, /* stmt_fixup */
1338 0, /* function_transform_todo_flags_start */
1339 NULL, /* function_transform */
1340 NULL) /* variable_transform */
1343 /* opt_pass methods: */
1344 bool gate (function *) final override;
1345 unsigned int execute (function *) final override
1347 return ipa_cdtor_merge ();
1350 }; // class pass_ipa_cdtor_merge
1352 bool
1353 pass_ipa_cdtor_merge::gate (function *)
1355 /* Perform the pass when we have no ctors/dtors support
1356 or at LTO time to merge multiple constructors into single
1357 function. */
1358 return !targetm.have_ctors_dtors || in_lto_p || targetm.dtors_from_cxa_atexit;
1361 } // anon namespace
1363 ipa_opt_pass_d *
1364 make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1366 return new pass_ipa_cdtor_merge (ctxt);
1369 /* Invalid pointer representing BOTTOM for single user dataflow. */
1370 #define BOTTOM ((cgraph_node *)(size_t) 2)
1372 /* Meet operation for single user dataflow.
1373 Here we want to associate variables with sigle function that may access it.
1375 FUNCTION is current single user of a variable, VAR is variable that uses it.
1376 Latttice is stored in SINGLE_USER_MAP.
1378 We represent:
1379 - TOP by no entry in SIGNLE_USER_MAP
1380 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1381 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1383 cgraph_node *
1384 meet (cgraph_node *function, varpool_node *var,
1385 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1387 struct cgraph_node *user, **f;
1389 if (var->aux == BOTTOM)
1390 return BOTTOM;
1392 f = single_user_map.get (var);
1393 if (!f)
1394 return function;
1395 user = *f;
1396 if (!function)
1397 return user;
1398 else if (function != user)
1399 return BOTTOM;
1400 else
1401 return function;
1404 /* Propagation step of single-use dataflow.
1406 Check all uses of VNODE and see if they are used by single function FUNCTION.
1407 SINGLE_USER_MAP represents the dataflow lattice. */
1409 cgraph_node *
1410 propagate_single_user (varpool_node *vnode, cgraph_node *function,
1411 hash_map<varpool_node *, cgraph_node *> &single_user_map)
1413 int i;
1414 struct ipa_ref *ref;
1416 gcc_assert (!vnode->externally_visible);
1418 /* If node is an alias, first meet with its target. */
1419 if (vnode->alias)
1420 function = meet (function, vnode->get_alias_target (), single_user_map);
1422 /* Check all users and see if they correspond to a single function. */
1423 for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
1425 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1426 if (cnode)
1428 if (cnode->inlined_to)
1429 cnode = cnode->inlined_to;
1430 if (!function)
1431 function = cnode;
1432 else if (function != cnode)
1433 function = BOTTOM;
1435 else
1436 function = meet (function, dyn_cast <varpool_node *> (ref->referring),
1437 single_user_map);
1439 return function;
1442 /* Pass setting used_by_single_function flag.
1443 This flag is set on variable when there is only one function that may
1444 possibly referr to it. */
1446 static unsigned int
1447 ipa_single_use (void)
1449 varpool_node *first = (varpool_node *) (void *) 1;
1450 varpool_node *var;
1451 hash_map<varpool_node *, cgraph_node *> single_user_map;
1453 FOR_EACH_DEFINED_VARIABLE (var)
1454 if (!var->all_refs_explicit_p ())
1455 var->aux = BOTTOM;
1456 else
1458 /* Enqueue symbol for dataflow. */
1459 var->aux = first;
1460 first = var;
1463 /* The actual dataflow. */
1465 while (first != (void *) 1)
1467 cgraph_node *user, *orig_user, **f;
1469 var = first;
1470 first = (varpool_node *)first->aux;
1472 f = single_user_map.get (var);
1473 if (f)
1474 orig_user = *f;
1475 else
1476 orig_user = NULL;
1477 user = propagate_single_user (var, orig_user, single_user_map);
1479 gcc_checking_assert (var->aux != BOTTOM);
1481 /* If user differs, enqueue all references. */
1482 if (user != orig_user)
1484 unsigned int i;
1485 ipa_ref *ref;
1487 single_user_map.put (var, user);
1489 /* Enqueue all aliases for re-processing. */
1490 for (i = 0; var->iterate_direct_aliases (i, ref); i++)
1491 if (!ref->referring->aux)
1493 ref->referring->aux = first;
1494 first = dyn_cast <varpool_node *> (ref->referring);
1496 /* Enqueue all users for re-processing. */
1497 for (i = 0; var->iterate_reference (i, ref); i++)
1498 if (!ref->referred->aux
1499 && ref->referred->definition
1500 && is_a <varpool_node *> (ref->referred))
1502 ref->referred->aux = first;
1503 first = dyn_cast <varpool_node *> (ref->referred);
1506 /* If user is BOTTOM, just punt on this var. */
1507 if (user == BOTTOM)
1508 var->aux = BOTTOM;
1509 else
1510 var->aux = NULL;
1512 else
1513 var->aux = NULL;
1516 FOR_EACH_DEFINED_VARIABLE (var)
1518 if (var->aux != BOTTOM)
1520 /* Not having the single user known means that the VAR is
1521 unreachable. Either someone forgot to remove unreachable
1522 variables or the reachability here is wrong. */
1524 gcc_checking_assert (single_user_map.get (var));
1526 if (dump_file)
1528 fprintf (dump_file, "Variable %s is used by single function\n",
1529 var->dump_name ());
1531 var->used_by_single_function = true;
1533 var->aux = NULL;
1535 return 0;
1538 namespace {
1540 const pass_data pass_data_ipa_single_use =
1542 IPA_PASS, /* type */
1543 "single-use", /* name */
1544 OPTGROUP_NONE, /* optinfo_flags */
1545 TV_CGRAPHOPT, /* tv_id */
1546 0, /* properties_required */
1547 0, /* properties_provided */
1548 0, /* properties_destroyed */
1549 0, /* todo_flags_start */
1550 0, /* todo_flags_finish */
1553 class pass_ipa_single_use : public ipa_opt_pass_d
1555 public:
1556 pass_ipa_single_use (gcc::context *ctxt)
1557 : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
1558 NULL, /* generate_summary */
1559 NULL, /* write_summary */
1560 NULL, /* read_summary */
1561 NULL, /* write_optimization_summary */
1562 NULL, /* read_optimization_summary */
1563 NULL, /* stmt_fixup */
1564 0, /* function_transform_todo_flags_start */
1565 NULL, /* function_transform */
1566 NULL) /* variable_transform */
1569 /* opt_pass methods: */
1570 unsigned int execute (function *) final override { return ipa_single_use (); }
1572 }; // class pass_ipa_single_use
1574 } // anon namespace
1576 ipa_opt_pass_d *
1577 make_pass_ipa_single_use (gcc::context *ctxt)
1579 return new pass_ipa_single_use (ctxt);