gcc/lto-cgraph.cc
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2025 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "omp-general.h"
41 #include "stringpool.h"
42 #include "attribs.h"
43 #include "alloc-pool.h"
44 #include "symbol-summary.h"
45 #include "symtab-thunks.h"
46 #include "symtab-clones.h"
48 /* True when asm nodes have been output.  */
49 bool asm_nodes_output = false;
51 static void output_cgraph_opt_summary (void);
52 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
57 /* Cgraph streaming is organized as a set of records whose type
58 is indicated by a tag.  */
59 enum LTO_symtab_tags
61 /* Must leave 0 for the stopper. */
63 /* Cgraph node without body available. */
64 LTO_symtab_unavail_node = 1,
65 /* Cgraph node with function body. */
66 LTO_symtab_analyzed_node,
67 /* Cgraph edges. */
68 LTO_symtab_edge,
69 LTO_symtab_indirect_edge,
70 LTO_symtab_variable,
71 LTO_symtab_indirect_function,
72 LTO_symtab_last_tag
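/* Illustrative layout of a symtab section as written by output_symtab and
   read back by input_cgraph_1: a sequence of records, each introduced by one
   of the tags above and terminated by a 0 tag (the "stopper"), e.g.

     LTO_symtab_analyzed_node  <node record>
     LTO_symtab_edge           <edge record>
     ...
     0

   which is why the value 0 must stay unused in this enum.  */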
75 /* Create a new symtab encoder.
76 If FOR_INPUT, the encoder allocates only the data structures needed
77 to read the symtab.  */
79 lto_symtab_encoder_t
80 lto_symtab_encoder_new (bool for_input)
82 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
84 if (!for_input)
85 encoder->map = new hash_map<symtab_node *, size_t>;
86 encoder->nodes.create (0);
87 return encoder;
91 /* Delete ENCODER and its components. */
93 void
94 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
96 encoder->nodes.release ();
97 if (encoder->map)
98 delete encoder->map;
99 if (encoder->order_remap)
100 delete encoder->order_remap;
101 free (encoder);
105 /* Return the existing reference number of NODE in the symtab encoder in
106 output block OB. Assign a new reference if this is the first time
107 NODE is encoded. */
110 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
111 symtab_node *node)
113 int ref;
115 if (!encoder->map)
117 lto_encoder_entry entry (node);
119 ref = encoder->nodes.length ();
120 encoder->nodes.safe_push (entry);
121 return ref;
124 size_t *slot = encoder->map->get (node);
125 if (!slot || !*slot)
127 lto_encoder_entry entry (node);
128 ref = encoder->nodes.length ();
129 if (!slot)
130 encoder->map->put (node, ref + 1);
131 encoder->nodes.safe_push (entry);
133 else
134 ref = *slot - 1;
136 return ref;
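/* Usage sketch (illustrative only; NODE stands for any symtab_node):

     lto_symtab_encoder_t enc = lto_symtab_encoder_new (false);
     int r = lto_symtab_encoder_encode (enc, node);
     gcc_assert (lto_symtab_encoder_lookup (enc, node) == r);
     lto_symtab_encoder_delete (enc);

   Note that the map stores INDEX + 1, so a zero entry means "no reference
   assigned yet".  */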
139 /* Remove NODE from encoder. */
141 bool
142 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
143 symtab_node *node)
145 int index;
147 size_t *slot = encoder->map->get (node);
148 if (slot == NULL || !*slot)
149 return false;
151 index = *slot - 1;
152 gcc_checking_assert (encoder->nodes[index].node == node);
154 /* Remove from vector. We do this by swapping node with the last element
155 of the vector. */
156 lto_encoder_entry last_node = encoder->nodes.pop ();
157 if (last_node.node != node)
159 bool existed = encoder->map->put (last_node.node, index + 1);
160 gcc_assert (existed);
162 /* Move the last element to the original spot of NODE. */
163 encoder->nodes[index] = last_node;
166 /* Remove element from hash table. */
167 encoder->map->remove (node);
168 return true;
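/* Note that the removal above is constant time: the deleted entry is replaced
   by the last element of the vector, so only the mapping of that moved node
   needs updating; the indices of all remaining nodes are unchanged.  */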
171 /* Return TRUE if the NODE and its clones are always inlined. */
173 bool
174 lto_symtab_encoder_only_for_inlining_p (lto_symtab_encoder_t encoder,
175 struct cgraph_node *node)
177 int index = lto_symtab_encoder_lookup (encoder, node);
178 return encoder->nodes[index].only_for_inlining;
181 /* Return TRUE if we should encode the body of NODE (if any). */
183 bool
184 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
185 struct cgraph_node *node)
187 int index = lto_symtab_encoder_lookup (encoder, node);
188 return encoder->nodes[index].body;
191 /* Return TRUE if we should encode initializer of NODE (if any). */
193 bool
194 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
195 varpool_node *node)
197 int index = lto_symtab_encoder_lookup (encoder, node);
198 if (index == LCC_NOT_FOUND)
199 return false;
200 return encoder->nodes[index].initializer;
203 /* Specify that we should encode initializer of NODE (if any). */
205 static void
206 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
207 varpool_node *node)
209 int index = lto_symtab_encoder_lookup (encoder, node);
210 encoder->nodes[index].initializer = true;
213 /* Return TRUE if NODE is in this partition. */
215 bool
216 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
217 symtab_node *node)
219 int index = lto_symtab_encoder_lookup (encoder, node);
220 if (index == LCC_NOT_FOUND)
221 return false;
222 return encoder->nodes[index].in_partition;
225 /* Specify that NODE is in this partition. */
227 void
228 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
229 symtab_node *node)
231 int index = lto_symtab_encoder_encode (encoder, node);
232 encoder->nodes[index].in_partition = true;
235 /* Output the cgraph EDGE to OB using ENCODER. */
237 static void
238 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
239 lto_symtab_encoder_t encoder)
241 unsigned int uid;
242 intptr_t ref;
243 struct bitpack_d bp;
245 if (edge->indirect_unknown_callee)
246 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
247 LTO_symtab_indirect_edge);
248 else
249 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
250 LTO_symtab_edge);
252 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
253 gcc_assert (ref != LCC_NOT_FOUND);
254 streamer_write_hwi_stream (ob->main_stream, ref);
256 if (!edge->indirect_unknown_callee)
258 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
259 gcc_assert (ref != LCC_NOT_FOUND);
260 streamer_write_hwi_stream (ob->main_stream, ref);
263 edge->count.stream_out (ob->main_stream);
265 bp = bitpack_create (ob->main_stream);
266 uid = !edge->call_stmt ? edge->lto_stmt_uid
267 : gimple_uid (edge->call_stmt) + 1;
268 bp_pack_enum (&bp, cgraph_inline_failed_t,
269 CIF_N_REASONS, edge->inline_failed);
270 gcc_checking_assert (uid || edge->caller->thunk);
271 bp_pack_var_len_unsigned (&bp, uid);
272 bp_pack_value (&bp, edge->speculative_id, 16);
273 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
274 bp_pack_value (&bp, edge->speculative, 1);
275 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
276 gcc_assert (!edge->call_stmt_cannot_inline_p
277 || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
278 bp_pack_value (&bp, edge->can_throw_external, 1);
279 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
280 if (edge->indirect_unknown_callee)
282 int flags = edge->indirect_info->ecf_flags;
283 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
284 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
285 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
286 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
287 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
288 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
289 /* Flags that should not appear on indirect calls. */
290 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
291 | ECF_MAY_BE_ALLOCA
292 | ECF_SIBCALL
293 | ECF_LEAF
294 | ECF_NOVOPS)));
296 bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
297 16);
299 streamer_write_bitpack (&bp);
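/* The reader side of the above is input_edge; the count and the bitpacked
   fields must be consumed there in exactly the order they are written here.  */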
302 /* Return true if NODE is referenced from other partitions.  */
304 bool
305 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
307 int i;
308 struct ipa_ref *ref = NULL;
310 for (i = 0; node->iterate_referring (i, ref); i++)
312 /* Ignore references from non-offloadable nodes while streaming NODE into
313 offload LTO section. */
314 if (!ref->referring->need_lto_streaming)
315 continue;
317 if (ref->referring->in_other_partition
318 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
319 return true;
321 return false;
324 /* Return true when NODE is reachable from another partition.  */
326 bool
327 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
329 struct cgraph_edge *e;
330 if (!node->definition)
331 return false;
332 if (node->inlined_to)
333 return false;
334 for (e = node->callers; e; e = e->next_caller)
336 /* Ignore references from non-offloadable nodes while streaming NODE into
337 offload LTO section. */
338 if (!e->caller->need_lto_streaming)
339 continue;
341 if (e->caller->in_other_partition
342 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
343 return true;
345 return false;
348 /* Return true if NODE is referenced from this partition.  */
350 bool
351 referenced_from_this_partition_p (symtab_node *node,
352 lto_symtab_encoder_t encoder)
354 int i;
355 struct ipa_ref *ref = NULL;
357 for (i = 0; node->iterate_referring (i, ref); i++)
358 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
360 return false;
363 /* Return true when NODE is reachable from this partition.  */
365 bool
366 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
368 struct cgraph_edge *e;
369 for (e = node->callers; e; e = e->next_caller)
370 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
371 return true;
372 return false;
375 /* Output the cgraph NODE to OB. ENCODER is used to find the
376 reference number of NODE->inlined_to. SET is the set of nodes we
377 are writing to the current file. If NODE is not in SET, then NODE
378 is a boundary of a cgraph_node_set and we pretend NODE just has a
379 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
380 that have had their callgraph node written so far. This is used to
381 determine if NODE is a clone of a previously written node. */
383 static void
384 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
385 lto_symtab_encoder_t encoder)
387 unsigned int tag;
388 struct bitpack_d bp;
389 bool boundary_p;
390 intptr_t ref;
391 bool in_other_partition = false;
392 struct cgraph_node *clone_of, *ultimate_clone_of;
393 ipa_opt_pass_d *pass;
394 int i;
395 const char *comdat;
396 const char *section;
397 tree group;
399 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
401 if (node->analyzed && (!boundary_p || node->alias
402 || (node->thunk && !node->inlined_to)))
403 tag = LTO_symtab_analyzed_node;
404 else
405 tag = LTO_symtab_unavail_node;
407 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
408 tag);
409 int output_order = *encoder->order_remap->get (node->order);
410 streamer_write_hwi_stream (ob->main_stream, output_order);
412 /* In WPA mode, we only output part of the call-graph. Also, we
413 fake cgraph node attributes.  There are two cases we care about.
415 Boundary nodes: There are nodes that are not part of SET but are
416 called from within SET. We artificially make them look like
417 externally visible nodes with no function body.
419 Cherry-picked nodes: These are nodes we pulled from other
420 translation units into SET during IPA-inlining.  We make them
421 local static nodes to prevent clashes with other local statics.  */
422 if (boundary_p && node->analyzed
423 && node->get_partitioning_class () == SYMBOL_PARTITION)
425 /* Inline clones cannot be part of the boundary.
426 gcc_assert (!node->inlined_to);
428 FIXME: At the moment they can be, when the partition contains an inline
429 clone that is a clone of an inline clone from outside the partition.  We can
430 reshape the clone tree and make another node the root, but it
431 needs a bit of extra work and will be promptly done by cgraph_remove_node
432 after reading back.  */
433 in_other_partition = 1;
435 else if (UNLIKELY (lto_stream_offload_p
436 && lookup_attribute ("omp target device_ancestor_host",
437 DECL_ATTRIBUTES (node->decl))))
438 /* This symbol is only used as argument to IFN_GOMP_TARGET_REV; this IFN
439 is ignored on ACCEL_COMPILER. Thus, mark it as in_other_partition to silence
440 verify_node_partition diagnostic. */
441 in_other_partition = 1;
443 clone_of = node->clone_of;
444 while (clone_of
445 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
446 if (clone_of->prev_sibling_clone)
447 clone_of = clone_of->prev_sibling_clone;
448 else
449 clone_of = clone_of->clone_of;
451 /* See if the body of the master function is output.  If not, we are seeing only
452 a declaration and we do not need to pass down the clone tree.  */
453 ultimate_clone_of = clone_of;
454 while (ultimate_clone_of && ultimate_clone_of->clone_of)
455 ultimate_clone_of = ultimate_clone_of->clone_of;
457 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
458 clone_of = NULL;
460 if (tag == LTO_symtab_analyzed_node)
461 gcc_assert (clone_of || !node->clone_of);
462 if (!clone_of)
463 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
464 else
465 streamer_write_hwi_stream (ob->main_stream, ref);
468 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream, node->decl);
469 node->count.stream_out (ob->main_stream);
470 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
472 streamer_write_hwi_stream (ob->main_stream,
473 node->ipa_transforms_to_apply.length ());
474 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
475 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
477 if (tag == LTO_symtab_analyzed_node)
479 if (node->inlined_to)
481 ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
482 gcc_assert (ref != LCC_NOT_FOUND);
484 else
485 ref = LCC_NOT_FOUND;
487 streamer_write_hwi_stream (ob->main_stream, ref);
490 group = node->get_comdat_group ();
491 if (group)
492 comdat = IDENTIFIER_POINTER (group);
493 else
494 comdat = "";
495 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
497 if (group)
499 if (node->same_comdat_group)
501 ref = LCC_NOT_FOUND;
502 for (struct symtab_node *n = node->same_comdat_group;
503 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
504 ref = lto_symtab_encoder_lookup (encoder, n);
506 else
507 ref = LCC_NOT_FOUND;
508 streamer_write_hwi_stream (ob->main_stream, ref);
511 section = node->get_section ();
512 if (!section)
513 section = "";
515 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
517 bp = bitpack_create (ob->main_stream);
518 bp_pack_value (&bp, node->local, 1);
519 bp_pack_value (&bp, node->externally_visible, 1);
520 bp_pack_value (&bp, node->no_reorder, 1);
521 bp_pack_value (&bp, node->definition, 1);
522 bp_pack_value (&bp, node->versionable, 1);
523 bp_pack_value (&bp, node->can_change_signature, 1);
524 bp_pack_value (&bp, node->redefined_extern_inline, 1);
525 bp_pack_value (&bp, node->force_output, 1);
526 bp_pack_value (&bp, node->forced_by_abi, 1);
527 bp_pack_value (&bp, node->unique_name, 1);
528 bp_pack_value (&bp, node->body_removed, 1);
529 bp_pack_value (&bp, node->semantic_interposition, 1);
530 bp_pack_value (&bp, node->implicit_section, 1);
531 bp_pack_value (&bp, node->address_taken, 1);
532 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
533 && node->get_partitioning_class () == SYMBOL_PARTITION
534 && (reachable_from_other_partition_p (node, encoder)
535 || referenced_from_other_partition_p (node, encoder)), 1);
536 bp_pack_value (&bp, node->lowered, 1);
537 bp_pack_value (&bp, in_other_partition, 1);
538 bp_pack_value (&bp, node->alias, 1);
539 bp_pack_value (&bp, node->transparent_alias, 1);
540 bp_pack_value (&bp, node->weakref, 1);
541 bp_pack_value (&bp, node->symver, 1);
542 bp_pack_value (&bp, node->frequency, 2);
543 bp_pack_value (&bp, node->only_called_at_startup, 1);
544 bp_pack_value (&bp, node->only_called_at_exit, 1);
545 bp_pack_value (&bp, node->tm_clone, 1);
546 bp_pack_value (&bp, node->calls_comdat_local, 1);
547 bp_pack_value (&bp, node->icf_merged, 1);
548 bp_pack_value (&bp, node->nonfreeing_fn, 1);
549 bp_pack_value (&bp, node->merged_comdat, 1);
550 bp_pack_value (&bp, node->merged_extern_inline, 1);
551 bp_pack_value (&bp, node->thunk, 1);
552 bp_pack_value (&bp, node->parallelized_function, 1);
553 bp_pack_value (&bp, node->has_omp_variant_constructs, 1);
555 /* Always stream thunk info because we use it in
556 ipa_polymorphic_call_context::ipa_polymorphic_call_context
557 to properly interpret THIS pointers for thunks that have been converted
558 to GIMPLE.  */
559 struct thunk_info *thunk = node->definition ? thunk_info::get (node) : NULL;
561 bp_pack_value (&bp, thunk != NULL, 1);
563 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
564 LDPR_NUM_KNOWN,
565 /* When doing incremental link, we will get new resolution
566 info next time we process the file. */
567 flag_incremental_link == INCREMENTAL_LINK_LTO
568 ? LDPR_UNKNOWN : node->resolution);
569 bp_pack_value (&bp, node->split_part, 1);
570 streamer_write_bitpack (&bp);
571 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
573 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
574 streamer_write_hwi_stream (ob->main_stream, node->unit_id);
575 if (DECL_STATIC_CONSTRUCTOR (node->decl))
576 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
577 if (DECL_STATIC_DESTRUCTOR (node->decl))
578 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
580 if (thunk)
581 thunk_info::get (node)->stream_out (ob);
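/* The reader counterparts of the above are input_node and
   input_overwrite_node; the order of the streamed fields and of the
   bitpacked flags must be kept in sync with them.  */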
584 /* Output the varpool NODE to OB.
585 If NODE is not in SET, then NODE is a boundary. */
587 static void
588 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
589 lto_symtab_encoder_t encoder)
591 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
592 bool encode_initializer_p
593 = (node->definition
594 && lto_symtab_encoder_encode_initializer_p (encoder, node));
595 struct bitpack_d bp;
596 int ref;
597 const char *comdat;
598 const char *section;
599 tree group;
601 gcc_assert (!encode_initializer_p || node->definition);
602 gcc_assert (boundary_p || encode_initializer_p);
604 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
605 LTO_symtab_variable);
606 int output_order = *encoder->order_remap->get (node->order);
607 streamer_write_hwi_stream (ob->main_stream, output_order);
608 lto_output_var_decl_ref (ob->decl_state, ob->main_stream, node->decl);
609 bp = bitpack_create (ob->main_stream);
610 bp_pack_value (&bp, node->externally_visible, 1);
611 bp_pack_value (&bp, node->no_reorder, 1);
612 bp_pack_value (&bp, node->force_output, 1);
613 bp_pack_value (&bp, node->forced_by_abi, 1);
614 bp_pack_value (&bp, node->unique_name, 1);
615 bp_pack_value (&bp,
616 node->body_removed
617 || (!encode_initializer_p && !node->alias && node->definition),
619 bp_pack_value (&bp, node->semantic_interposition, 1);
620 bp_pack_value (&bp, node->implicit_section, 1);
621 bp_pack_value (&bp, node->writeonly, 1);
622 bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
624 bp_pack_value (&bp, node->alias, 1);
625 bp_pack_value (&bp, node->transparent_alias, 1);
626 bp_pack_value (&bp, node->weakref, 1);
627 bp_pack_value (&bp, node->symver, 1);
628 bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
629 gcc_assert (node->definition || !node->analyzed);
630 /* Constant pool initializers can be de-unified into individual ltrans units.
631 FIXME: Alternatively, at -Os we may want to avoid generating the local
632 labels for them and share them across LTRANS partitions.  */
633 if (node->get_partitioning_class () != SYMBOL_PARTITION)
635 bp_pack_value (&bp, 0, 1); /* used_from_other_partition.  */
636 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
638 else
640 bp_pack_value (&bp, node->definition
641 && referenced_from_other_partition_p (node, encoder), 1);
642 bp_pack_value (&bp, node->analyzed
643 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
644 /* in_other_partition. */
646 bp_pack_value (&bp, node->tls_model, 3);
647 bp_pack_value (&bp, node->used_by_single_function, 1);
648 bp_pack_value (&bp, node->dynamically_initialized, 1);
649 streamer_write_bitpack (&bp);
651 group = node->get_comdat_group ();
652 if (group)
653 comdat = IDENTIFIER_POINTER (group);
654 else
655 comdat = "";
656 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
658 if (group)
660 if (node->same_comdat_group)
662 ref = LCC_NOT_FOUND;
663 for (struct symtab_node *n = node->same_comdat_group;
664 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
665 ref = lto_symtab_encoder_lookup (encoder, n);
667 else
668 ref = LCC_NOT_FOUND;
669 streamer_write_hwi_stream (ob->main_stream, ref);
672 section = node->get_section ();
673 if (!section)
674 section = "";
675 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
677 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
678 LDPR_NUM_KNOWN, node->resolution);
681 /* Output the ipa reference REF to OB using ENCODER.  */
684 static void
685 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
686 lto_symtab_encoder_t encoder)
688 struct bitpack_d bp;
689 int nref;
690 int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
691 struct cgraph_node *node;
693 bp = bitpack_create (ob->main_stream);
694 bp_pack_value (&bp, ref->use, 3);
695 bp_pack_value (&bp, ref->speculative, 1);
696 streamer_write_bitpack (&bp);
697 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
698 gcc_assert (nref != LCC_NOT_FOUND);
699 streamer_write_hwi_stream (ob->main_stream, nref);
701 node = dyn_cast <cgraph_node *> (ref->referring);
702 if (node)
704 if (ref->stmt)
705 uid = gimple_uid (ref->stmt) + 1;
706 streamer_write_hwi_stream (ob->main_stream, uid);
707 bp_pack_value (&bp, ref->speculative_id, 16);
708 streamer_write_bitpack (&bp);
712 /* Stream out profile_summary to OB. */
714 static void
715 output_profile_summary (struct lto_simple_output_block *ob)
717 if (profile_info)
719 /* We do not output num and run_max, they are not used by
720 GCC profile feedback and they are difficult to merge from multiple
721 units. */
722 unsigned runs = (profile_info->runs);
723 streamer_write_uhwi_stream (ob->main_stream, runs);
725 /* IPA-profile computes hot bb threshold based on cumulated
726 whole program profile. We need to stream it down to ltrans. */
727 if (flag_wpa)
728 streamer_write_gcov_count_stream (ob->main_stream,
729 get_hot_bb_threshold ());
731 else
732 streamer_write_uhwi_stream (ob->main_stream, 0);
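/* On the reader side (input_profile_summary) a runs value of 0 means "no
   profile data", which is why the no-profile case above streams a plain 0.  */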
735 /* Output all callees or indirect outgoing edges. EDGE must be the first such
736 edge. */
738 static void
739 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
740 struct lto_simple_output_block *ob,
741 lto_symtab_encoder_t encoder)
743 if (!edge)
744 return;
746 /* Output edges in backward direction, so the reconstructed callgraph matches
747 and it is easy to associate call sites with the IPA pass summaries.  */
748 while (edge->next_callee)
749 edge = edge->next_callee;
750 for (; edge; edge = edge->prev_callee)
751 lto_output_edge (ob, edge, encoder);
754 /* Output the references for the part of the symtab in ENCODER.  */
756 static void
757 output_refs (lto_symtab_encoder_t encoder)
759 struct lto_simple_output_block *ob;
760 int count;
761 struct ipa_ref *ref;
763 ob = lto_create_simple_output_block (LTO_section_refs);
765 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
767 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
769 /* IPA_REF_ALIAS references are always preserved
770 in the boundary.  An alias node can't have other references and
771 can always be handled as if it's not in the boundary.  */
772 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
773 continue;
775 count = node->ref_list.nreferences ();
776 if (count)
778 streamer_write_gcov_count_stream (ob->main_stream, count);
779 streamer_write_uhwi_stream (ob->main_stream,
780 lto_symtab_encoder_lookup (encoder, node));
781 for (int i = 0; node->iterate_reference (i, ref); i++)
782 lto_output_ref (ob, ref, encoder);
786 streamer_write_uhwi_stream (ob->main_stream, 0);
788 lto_destroy_simple_output_block (ob);
791 /* Add NODE into encoder as well as nodes it is cloned from.
792 Do it in a way so that the nodes NODE was cloned from appear before NODE.  */
794 static void
795 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
796 bool include_body, bool not_inlined)
798 if (node->clone_of)
799 add_node_to (encoder, node->clone_of, include_body, not_inlined);
801 int index = lto_symtab_encoder_encode (encoder, node);
802 gcc_checking_assert (encoder->nodes[index].node == node);
804 if (include_body)
805 encoder->nodes[index].body = true;
806 if (not_inlined)
807 encoder->nodes[index].only_for_inlining = false;
810 /* Add NODE into encoder as well as nodes it is cloned from.
811 Do it in a way so that the nodes NODE was cloned from appear before NODE.  */
813 static void
814 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
815 bool include_body)
817 add_node_to (encoder, node, include_body, include_body && !node->inlined_to);
820 /* Add all references in NODE to encoders. */
822 static void
823 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
825 int i;
826 struct ipa_ref *ref = NULL;
827 for (i = 0; node->iterate_reference (i, ref); i++)
828 if (is_a <cgraph_node *> (ref->referred))
829 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
830 else
831 lto_symtab_encoder_encode (encoder, ref->referred);
834 /* Select what needs to be streamed out. In regular lto mode stream everything.
835 In offload lto mode stream only nodes marked as offloadable. */
836 void
837 select_what_to_stream (void)
839 struct symtab_node *snode;
840 FOR_EACH_SYMBOL (snode)
841 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
844 /* Find all symbols we want to stream into the given partition and insert them
845 into encoders.
847 The function actually replaces IN_ENCODER by a new one.  The reason is that
848 the streaming code needs a clone's origin to be streamed before the clone.
849 This means that we need to insert the nodes in a specific order.  This order
850 is ignored by the partitioning logic earlier.  */
852 lto_symtab_encoder_t
853 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
855 struct cgraph_edge *edge;
856 int i;
857 lto_symtab_encoder_t encoder;
858 lto_symtab_encoder_iterator lsei;
859 hash_set<void *> reachable_call_targets;
861 encoder = lto_symtab_encoder_new (false);
863 /* Go over all entries in the IN_ENCODER and duplicate them to
864 ENCODER. At the same time insert masters of clones so
865 every master appears before clone. */
866 for (lsei = lsei_start_function_in_partition (in_encoder);
867 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
869 struct cgraph_node *node = lsei_cgraph_node (lsei);
870 if (!node->need_lto_streaming)
871 continue;
872 add_node_to (encoder, node, true);
873 lto_set_symtab_encoder_in_partition (encoder, node);
874 create_references (encoder, node);
876 for (lsei = lsei_start_variable_in_partition (in_encoder);
877 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
879 varpool_node *vnode = lsei_varpool_node (lsei);
881 if (!vnode->need_lto_streaming)
882 continue;
883 lto_set_symtab_encoder_in_partition (encoder, vnode);
884 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
885 create_references (encoder, vnode);
887 /* Also pickle in the initializers of all referenced readonly variables
888 to help folding. Constant pool variables are not shared, so we must
889 pickle those too. */
890 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
892 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
893 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
895 if (!lto_symtab_encoder_encode_initializer_p (encoder,
896 vnode)
897 && (((vnode->ctor_useable_for_folding_p ()
898 && (!DECL_VIRTUAL_P (vnode->decl)
899 || !flag_wpa
900 || flag_ltrans_devirtualize)))))
902 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
903 create_references (encoder, vnode);
908 /* Go over all the nodes again to include callees that are not in
909 SET. */
910 for (lsei = lsei_start_function_in_partition (encoder);
911 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
913 struct cgraph_node *node = lsei_cgraph_node (lsei);
914 for (edge = node->callees; edge; edge = edge->next_callee)
916 struct cgraph_node *callee = edge->callee;
917 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
919 /* We should have moved all the inlines. */
920 gcc_assert (!callee->inlined_to);
921 add_node_to (encoder, callee, false);
924 /* Add all possible targets for late devirtualization. */
925 if (flag_ltrans_devirtualize || !flag_wpa)
926 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
927 if (edge->indirect_info->polymorphic)
929 unsigned int i;
930 void *cache_token;
931 bool final;
932 vec <cgraph_node *>targets
933 = possible_polymorphic_call_targets
934 (edge, &final, &cache_token);
935 if (cache_token != NULL
936 && !reachable_call_targets.add (cache_token))
938 for (i = 0; i < targets.length (); i++)
940 struct cgraph_node *callee = targets[i];
942 /* Adding external declarations into the unit serves
943 no purpose and just increases its boundary.  */
944 if (callee->definition
945 && !lto_symtab_encoder_in_partition_p
946 (encoder, callee))
948 gcc_assert (!callee->inlined_to);
949 add_node_to (encoder, callee, false);
955 /* Be sure to also insert alias targets and thunk callees.  These need
956 to stay to aid local calling conventions.  */
957 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
959 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
960 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
962 if (node->alias && node->analyzed)
963 create_references (encoder, node);
964 if (cnode
965 && cnode->thunk && !cnode->inlined_to)
966 add_node_to (encoder, cnode->callees->callee, false);
967 while (node->transparent_alias && node->analyzed)
969 node = node->get_alias_target ();
970 if (is_a <cgraph_node *> (node))
971 add_node_to (encoder, dyn_cast <cgraph_node *> (node),
972 false);
973 else
974 lto_symtab_encoder_encode (encoder, node);
977 lto_symtab_encoder_delete (in_encoder);
978 return encoder;
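/* To summarize, the boundary computation above proceeds in four passes over
   the encoder: (1) copy the partition members from IN_ENCODER, inserting
   clone origins before their clones; (2) pickle initializers of referenced
   read-only variables usable for folding; (3) pull in out-of-partition
   callees and, when devirtualizing late, possible polymorphic call targets;
   (4) add alias targets and thunk callees needed for local calling
   conventions.  */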
981 /* Output the part of the symtab in SET and VSET. */
983 void
984 output_symtab (void)
986 struct cgraph_node *node;
987 struct lto_simple_output_block *ob;
988 int i, n_nodes;
989 lto_symtab_encoder_t encoder;
991 if (flag_wpa)
992 output_cgraph_opt_summary ();
994 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
996 output_profile_summary (ob);
998 /* An encoder for cgraph nodes should have been created by
999 ipa_write_summaries_1. */
1000 gcc_assert (ob->decl_state->symtab_node_encoder);
1001 encoder = ob->decl_state->symtab_node_encoder;
1003 /* Write out the nodes. We must first output a node and then its clones,
1004 otherwise, at the time of reading the clone back, there would be nothing to clone
1005 from. */
1006 n_nodes = lto_symtab_encoder_size (encoder);
1007 for (i = 0; i < n_nodes; i++)
1009 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1010 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1011 lto_output_node (ob, cnode, encoder);
1012 else
1013 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
1016 /* Go over the nodes in SET again to write edges. */
1017 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
1019 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
1020 if (node
1021 && ((node->thunk && !node->inlined_to)
1022 || lto_symtab_encoder_in_partition_p (encoder, node)))
1024 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1025 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
1029 streamer_write_uhwi_stream (ob->main_stream, 0);
1031 lto_destroy_simple_output_block (ob);
1033 /* Emit toplevel asms.
1034 When doing WPA we must output every asm just once. Since we do not partition asm
1035 nodes at all, output them to the first output.  This is kind of a hack, but should work
1036 well. */
1037 if (!asm_nodes_output && !lto_stream_offload_p)
1039 asm_nodes_output = true;
1040 lto_output_toplevel_asms ();
1043 output_refs (encoder);
1046 /* Return identifier encoded in IB as a plain string. */
1048 static tree
1049 read_identifier (class lto_input_block *ib)
1051 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1052 tree id;
1054 if (ib->data[ib->p + len])
1055 lto_section_overrun (ib);
1056 if (!len)
1058 ib->p++;
1059 return NULL;
1061 id = get_identifier (ib->data + ib->p);
1062 ib->p += len + 1;
1063 return id;
1066 /* Return string encoded in IB, NULL if string is empty. */
1068 static const char *
1069 read_string (class lto_input_block *ib)
1071 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1072 const char *str;
1074 if (ib->data[ib->p + len])
1075 lto_section_overrun (ib);
1076 if (!len)
1078 ib->p++;
1079 return NULL;
1081 str = ib->data + ib->p;
1082 ib->p += len + 1;
1083 return str;
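/* Strings and identifiers are written by the output routines above
   (streamer_write_data_stream with strlen + 1) with their terminating NUL
   included, so an empty string occupies a single NUL byte and is read back
   by read_identifier and read_string as NULL.  */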
1086 /* Output function/variable tables that will allow libgomp to look up offload
1087 target code.
1088 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1089 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1090 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1092 void
1093 output_offload_tables (void)
1095 bool output_requires = (flag_openmp
1096 && (omp_requires_mask & OMP_REQUIRES_TARGET_USED) != 0);
1097 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars)
1098 && !output_requires)
1099 return;
1101 struct lto_simple_output_block *ob
1102 = lto_create_simple_output_block (LTO_section_offload_table);
1104 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1106 symtab_node *node = symtab_node::get ((*offload_funcs)[i]);
1107 if (!node)
1108 continue;
1109 node->force_output = true;
1110 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1111 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1112 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
1113 (*offload_funcs)[i]);
1116 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1118 symtab_node *node = symtab_node::get ((*offload_vars)[i]);
1119 if (!node)
1120 continue;
1121 node->force_output = true;
1122 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1123 LTO_symtab_last_tag, LTO_symtab_variable);
1124 lto_output_var_decl_ref (ob->decl_state, ob->main_stream,
1125 (*offload_vars)[i]);
1128 for (unsigned i = 0; i < vec_safe_length (offload_ind_funcs); i++)
1130 symtab_node *node = symtab_node::get ((*offload_ind_funcs)[i]);
1131 if (!node)
1132 continue;
1133 node->force_output = true;
1134 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1135 LTO_symtab_last_tag, LTO_symtab_indirect_function);
1136 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
1137 (*offload_ind_funcs)[i]);
1140 if (output_requires)
1142 HOST_WIDE_INT val = ((HOST_WIDE_INT) omp_requires_mask
1143 & (OMP_REQUIRES_UNIFIED_ADDRESS
1144 | OMP_REQUIRES_UNIFIED_SHARED_MEMORY
1145 | OMP_REQUIRES_SELF_MAPS
1146 | OMP_REQUIRES_REVERSE_OFFLOAD
1147 | OMP_REQUIRES_TARGET_USED));
1148 /* (Mis)use LTO_symtab_edge for this variable. */
1149 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1150 LTO_symtab_last_tag, LTO_symtab_edge);
1151 streamer_write_hwi_stream (ob->main_stream, val);
1154 streamer_write_uhwi_stream (ob->main_stream, 0);
1155 lto_destroy_simple_output_block (ob);
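/* The resulting section is read by input_offload_tables below: a sequence of
   tagged decl references (functions, variables, indirect functions), with the
   LTO_symtab_edge tag reused to carry the omp_requires mask, terminated by a
   0 tag.  */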
1158 /* Verify the partitioning of NODE. */
1160 static inline void
1161 verify_node_partition (symtab_node *node)
1163 if (flag_ltrans)
1164 return;
1166 #ifdef ACCEL_COMPILER
1167 if (node->in_other_partition)
1169 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1171 if (lookup_attribute ("omp target device_ancestor_host",
1172 DECL_ATTRIBUTES (node->decl)) != NULL)
1173 return;
1174 error_at (DECL_SOURCE_LOCATION (node->decl),
1175 "function %qs has been referenced in offloaded code but"
1176 " hasn%'t been marked to be included in the offloaded code",
1177 node->name ());
1179 else if (VAR_P (node->decl))
1180 error_at (DECL_SOURCE_LOCATION (node->decl),
1181 "variable %qs has been referenced in offloaded code but"
1182 " hasn%'t been marked to be included in the offloaded code",
1183 node->name ());
1184 else
1185 gcc_unreachable ();
1187 #else
1188 gcc_assert (!node->in_other_partition
1189 && !node->used_from_other_partition);
1190 #endif
1193 /* Overwrite the information in NODE based on FILE_DATA, TAG and the flags
1194 unpacked from BP.  This is called either to initialize
1195 NODE or to replace the values in it, for instance because the first
1196 time we saw it, the function body was not available but now it
1197 is.  BP is a bitpack with all the bitflags for NODE read from the
1198 stream.  Set *HAS_THUNK_INFO to indicate whether thunk info should
1199 be streamed in.  */
1201 static void
1202 input_overwrite_node (struct lto_file_decl_data *file_data,
1203 struct cgraph_node *node,
1204 enum LTO_symtab_tags tag,
1205 struct bitpack_d *bp, bool *has_thunk_info)
1207 node->aux = (void *) tag;
1208 node->lto_file_data = file_data;
1210 node->local = bp_unpack_value (bp, 1);
1211 node->externally_visible = bp_unpack_value (bp, 1);
1212 node->no_reorder = bp_unpack_value (bp, 1);
1213 node->definition = bp_unpack_value (bp, 1);
1214 node->versionable = bp_unpack_value (bp, 1);
1215 node->can_change_signature = bp_unpack_value (bp, 1);
1216 node->redefined_extern_inline = bp_unpack_value (bp, 1);
1217 node->force_output = bp_unpack_value (bp, 1);
1218 node->forced_by_abi = bp_unpack_value (bp, 1);
1219 node->unique_name = bp_unpack_value (bp, 1);
1220 node->body_removed = bp_unpack_value (bp, 1);
1221 node->semantic_interposition = bp_unpack_value (bp, 1);
1222 node->implicit_section = bp_unpack_value (bp, 1);
1223 node->address_taken = bp_unpack_value (bp, 1);
1224 node->used_from_other_partition = bp_unpack_value (bp, 1);
1225 node->lowered = bp_unpack_value (bp, 1);
1226 node->analyzed = tag == LTO_symtab_analyzed_node;
1227 node->in_other_partition = bp_unpack_value (bp, 1);
1228 if (node->in_other_partition
1229 /* Avoid updating the decl when we are seeing just an inline clone.
1230 When inlining a function that has functions already inlined into it,
1231 we produce clones of inline clones.
1233 WPA partitioning might put each clone into a different unit and
1234 we might end up streaming an inline clone from another partition
1235 to support the clone we are interested in.  */
1236 && (!node->clone_of
1237 || node->clone_of->decl != node->decl))
1239 DECL_EXTERNAL (node->decl) = 1;
1240 TREE_STATIC (node->decl) = 0;
1242 node->alias = bp_unpack_value (bp, 1);
1243 node->transparent_alias = bp_unpack_value (bp, 1);
1244 node->weakref = bp_unpack_value (bp, 1);
1245 node->symver = bp_unpack_value (bp, 1);
1246 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1247 node->only_called_at_startup = bp_unpack_value (bp, 1);
1248 node->only_called_at_exit = bp_unpack_value (bp, 1);
1249 node->tm_clone = bp_unpack_value (bp, 1);
1250 node->calls_comdat_local = bp_unpack_value (bp, 1);
1251 node->icf_merged = bp_unpack_value (bp, 1);
1252 node->nonfreeing_fn = bp_unpack_value (bp, 1);
1253 node->merged_comdat = bp_unpack_value (bp, 1);
1254 node->merged_extern_inline = bp_unpack_value (bp, 1);
1255 node->thunk = bp_unpack_value (bp, 1);
1256 node->parallelized_function = bp_unpack_value (bp, 1);
1257 node->has_omp_variant_constructs = bp_unpack_value (bp, 1);
1258 *has_thunk_info = bp_unpack_value (bp, 1);
1259 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1260 LDPR_NUM_KNOWN);
1261 node->split_part = bp_unpack_value (bp, 1);
1262 verify_node_partition (node);
1265 /* Return the identifier of the symbol that DECL is an alias of.  */
1267 static tree
1268 get_alias_symbol (tree decl)
1270 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1271 return get_identifier (TREE_STRING_POINTER
1272 (TREE_VALUE (TREE_VALUE (alias))));
1275 /* Read a node from input_block IB. TAG is the node's tag just read.
1276 Return the node read or overwritten.  */
1278 static struct cgraph_node *
1279 input_node (struct lto_file_decl_data *file_data,
1280 class lto_input_block *ib,
1281 enum LTO_symtab_tags tag,
1282 vec<symtab_node *> nodes)
1284 gcc::pass_manager *passes = g->get_passes ();
1285 tree fn_decl;
1286 struct cgraph_node *node;
1287 struct bitpack_d bp;
1288 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1289 int clone_ref;
1290 int order;
1291 int i, count;
1292 tree group;
1293 const char *section;
1294 order = streamer_read_hwi (ib) + file_data->order_base;
1295 clone_ref = streamer_read_hwi (ib);
1296 bool has_thunk_info;
1298 fn_decl = lto_input_fn_decl_ref (ib, file_data);
1300 if (clone_ref != LCC_NOT_FOUND)
1302 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1303 profile_count::uninitialized (), false,
1304 vNULL, false, NULL, NULL);
1306 else
1308 /* Declarations of functions can already be merged with a declaration
1309 from another input file.  We keep the cgraph unmerged until after streaming
1310 of IPA passes is done.  Always forcefully create a fresh node.  */
1311 node = symtab->create_empty ();
1312 node->decl = fn_decl;
1313 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
1314 node->ifunc_resolver = 1;
1315 node->register_symbol ();
1318 node->order = order;
1319 if (order >= symtab->order)
1320 symtab->order = order + 1;
1322 node->count = profile_count::stream_in (ib);
1323 node->count_materialization_scale = streamer_read_hwi (ib);
1325 count = streamer_read_hwi (ib);
1326 node->ipa_transforms_to_apply = vNULL;
1327 for (i = 0; i < count; i++)
1329 opt_pass *pass;
1330 int pid = streamer_read_hwi (ib);
1332 gcc_assert (pid < passes->passes_by_id_size);
1333 pass = passes->passes_by_id[pid];
1334 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1337 if (tag == LTO_symtab_analyzed_node)
1338 ref = streamer_read_hwi (ib);
1340 group = read_identifier (ib);
1341 if (group)
1342 ref2 = streamer_read_hwi (ib);
1344 /* Make sure that we have not read this node before. Nodes that
1345 have already been read will have their tag stored in the 'aux'
1346 field. Since built-in functions can be referenced in multiple
1347 functions, they are expected to be read more than once. */
1348 if (node->aux && !fndecl_built_in_p (node->decl))
1349 internal_error ("bytecode stream: found multiple instances of cgraph "
1350 "node with uid %d", node->get_uid ());
1352 node->tp_first_run = streamer_read_uhwi (ib);
1354 bp = streamer_read_bitpack (ib);
1356 input_overwrite_node (file_data, node, tag, &bp, &has_thunk_info);
1358 /* Store a reference for now, and fix up later to be a pointer. */
1359 node->inlined_to = (cgraph_node *) (intptr_t) ref;
1361 if (group)
1363 node->set_comdat_group (group);
1364 /* Store a reference for now, and fix up later to be a pointer. */
1365 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1367 else
1368 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1369 section = read_string (ib);
1370 if (section)
1371 node->set_section_for_node (section);
1373 if (node->alias && !node->analyzed && node->weakref)
1374 node->alias_target = get_alias_symbol (node->decl);
1375 node->profile_id = streamer_read_hwi (ib);
1376 node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
1377 if (symtab->max_unit < node->unit_id)
1378 symtab->max_unit = node->unit_id;
1379 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1380 node->set_init_priority (streamer_read_hwi (ib));
1381 if (DECL_STATIC_DESTRUCTOR (node->decl))
1382 node->set_fini_priority (streamer_read_hwi (ib));
1384 if (has_thunk_info)
1385 thunk_info::get_create (node)->stream_in (ib);
1387 return node;
1390 /* Read a varpool node from input_block IB.
1391 Return the node read or overwritten.  */
1393 static varpool_node *
1394 input_varpool_node (struct lto_file_decl_data *file_data,
1395 class lto_input_block *ib)
1397 tree var_decl;
1398 varpool_node *node;
1399 struct bitpack_d bp;
1400 int ref = LCC_NOT_FOUND;
1401 int order;
1402 tree group;
1403 const char *section;
1405 order = streamer_read_hwi (ib) + file_data->order_base;
1406 var_decl = lto_input_var_decl_ref (ib, file_data);
1408 /* Declarations can already be merged with a declaration
1409 from another input file.  We keep the symtab unmerged until after streaming
1410 of IPA passes is done.  Always forcefully create a fresh node.  */
1411 node = varpool_node::create_empty ();
1412 node->decl = var_decl;
1413 node->register_symbol ();
1415 node->order = order;
1416 if (order >= symtab->order)
1417 symtab->order = order + 1;
1418 node->lto_file_data = file_data;
1420 bp = streamer_read_bitpack (ib);
1421 node->externally_visible = bp_unpack_value (&bp, 1);
1422 node->no_reorder = bp_unpack_value (&bp, 1);
1423 node->force_output = bp_unpack_value (&bp, 1);
1424 node->forced_by_abi = bp_unpack_value (&bp, 1);
1425 node->unique_name = bp_unpack_value (&bp, 1);
1426 node->body_removed = bp_unpack_value (&bp, 1);
1427 node->semantic_interposition = bp_unpack_value (&bp, 1);
1428 node->implicit_section = bp_unpack_value (&bp, 1);
1429 node->writeonly = bp_unpack_value (&bp, 1);
1430 node->definition = bp_unpack_value (&bp, 1);
1431 node->alias = bp_unpack_value (&bp, 1);
1432 node->transparent_alias = bp_unpack_value (&bp, 1);
1433 node->weakref = bp_unpack_value (&bp, 1);
1434 node->symver = bp_unpack_value (&bp, 1);
1435 node->analyzed = bp_unpack_value (&bp, 1);
1436 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1437 node->in_other_partition = bp_unpack_value (&bp, 1);
1438 if (node->in_other_partition)
1440 DECL_EXTERNAL (node->decl) = 1;
1441 TREE_STATIC (node->decl) = 0;
1443 if (node->alias && !node->analyzed && node->weakref)
1444 node->alias_target = get_alias_symbol (node->decl);
1445 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1446 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1447 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1448 group = read_identifier (ib);
1449 if (group)
1451 node->set_comdat_group (group);
1452 ref = streamer_read_hwi (ib);
1453 /* Store a reference for now, and fix up later to be a pointer. */
1454 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1456 else
1457 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1458 section = read_string (ib);
1459 if (section)
1460 node->set_section_for_node (section);
1461 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1462 LDPR_NUM_KNOWN);
1463 verify_node_partition (node);
1464 return node;
1467 /* Read a reference from input_block IB and attach it to REFERRING_NODE.
1468 NODES is the vector of previously read symtab nodes.  */
1470 static void
1471 input_ref (class lto_input_block *ib,
1472 symtab_node *referring_node,
1473 vec<symtab_node *> nodes)
1475 symtab_node *node = NULL;
1476 struct bitpack_d bp;
1477 enum ipa_ref_use use;
1478 bool speculative;
1479 struct ipa_ref *ref;
1481 bp = streamer_read_bitpack (ib);
1482 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1483 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1484 node = nodes[streamer_read_hwi (ib)];
1485 ref = referring_node->create_reference (node, use);
1486 ref->speculative = speculative;
1487 if (is_a <cgraph_node *> (referring_node))
1489 ref->lto_stmt_uid = streamer_read_hwi (ib);
1490 bp = streamer_read_bitpack (ib);
1491 ref->speculative_id = bp_unpack_value (&bp, 16);
1495 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1496 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1497 edge being read is indirect (in the sense that it has
1498 indirect_unknown_callee set). */
1500 static void
1501 input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
1502 bool indirect)
1504 struct cgraph_node *caller, *callee;
1505 struct cgraph_edge *edge;
1506 unsigned int stmt_id, speculative_id;
1507 profile_count count;
1508 cgraph_inline_failed_t inline_failed;
1509 struct bitpack_d bp;
1510 int ecf_flags = 0;
1512 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1513 if (caller == NULL || caller->decl == NULL_TREE)
1514 internal_error ("bytecode stream: no caller found while reading edge");
1516 if (!indirect)
1518 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1519 if (callee == NULL || callee->decl == NULL_TREE)
1520 internal_error ("bytecode stream: no callee found while reading edge");
1522 else
1523 callee = NULL;
1525 count = profile_count::stream_in (ib);
1527 bp = streamer_read_bitpack (ib);
1528 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1529 stmt_id = bp_unpack_var_len_unsigned (&bp);
1530 speculative_id = bp_unpack_value (&bp, 16);
1532 if (indirect)
1533 edge = caller->create_indirect_edge (NULL, 0, count);
1534 else
1535 edge = caller->create_edge (callee, NULL, count);
1537 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1538 edge->speculative = bp_unpack_value (&bp, 1);
1539 edge->lto_stmt_uid = stmt_id;
1540 edge->speculative_id = speculative_id;
1541 edge->inline_failed = inline_failed;
1542 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1543 edge->can_throw_external = bp_unpack_value (&bp, 1);
1544 edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1545 if (indirect)
1547 if (bp_unpack_value (&bp, 1))
1548 ecf_flags |= ECF_CONST;
1549 if (bp_unpack_value (&bp, 1))
1550 ecf_flags |= ECF_PURE;
1551 if (bp_unpack_value (&bp, 1))
1552 ecf_flags |= ECF_NORETURN;
1553 if (bp_unpack_value (&bp, 1))
1554 ecf_flags |= ECF_MALLOC;
1555 if (bp_unpack_value (&bp, 1))
1556 ecf_flags |= ECF_NOTHROW;
1557 if (bp_unpack_value (&bp, 1))
1558 ecf_flags |= ECF_RETURNS_TWICE;
1559 edge->indirect_info->ecf_flags = ecf_flags;
1561 edge->indirect_info->num_speculative_call_targets
1562 = bp_unpack_value (&bp, 16);
1567 /* Read a cgraph from IB using the info in FILE_DATA. */
1569 static vec<symtab_node *>
1570 input_cgraph_1 (struct lto_file_decl_data *file_data,
1571 class lto_input_block *ib)
1573 enum LTO_symtab_tags tag;
1574 vec<symtab_node *> nodes = vNULL;
1575 symtab_node *node;
1576 unsigned i;
1578 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1579 file_data->order_base = symtab->order;
1580 file_data->unit_base = symtab->max_unit + 1;
1581 while (tag)
1583 if (tag == LTO_symtab_edge)
1584 input_edge (ib, nodes, false);
1585 else if (tag == LTO_symtab_indirect_edge)
1586 input_edge (ib, nodes, true);
1587 else if (tag == LTO_symtab_variable)
1589 node = input_varpool_node (file_data, ib);
1590 nodes.safe_push (node);
1591 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1593 else
1595 node = input_node (file_data, ib, tag, nodes);
1596 if (node == NULL || node->decl == NULL_TREE)
1597 internal_error ("bytecode stream: found empty cgraph node");
1598 nodes.safe_push (node);
1599 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1602 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1605 lto_input_toplevel_asms (file_data, file_data->order_base);
1607 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1608 if (flag_checking)
1610 FOR_EACH_VEC_ELT (nodes, i, node)
1611 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1613 FOR_EACH_VEC_ELT (nodes, i, node)
1615 int ref;
1616 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1618 ref = (int) (intptr_t) cnode->inlined_to;
1620 /* We share declarations of builtins, so we may read the same node twice.  */
1621 if (!node->aux)
1622 continue;
1623 node->aux = NULL;
1625 /* Fixup inlined_to from reference to pointer. */
1626 if (ref != LCC_NOT_FOUND)
1627 dyn_cast<cgraph_node *> (node)->inlined_to
1628 = dyn_cast<cgraph_node *> (nodes[ref]);
1629 else
1630 cnode->inlined_to = NULL;
1633 ref = (int) (intptr_t) node->same_comdat_group;
1635 /* Fixup same_comdat_group from reference to pointer. */
1636 if (ref != LCC_NOT_FOUND)
1637 node->same_comdat_group = nodes[ref];
1638 else
1639 node->same_comdat_group = NULL;
1641 FOR_EACH_VEC_ELT (nodes, i, node)
1642 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1643 return nodes;
1646 /* Input ipa_refs. */
1648 static void
1649 input_refs (class lto_input_block *ib,
1650 vec<symtab_node *> nodes)
1652 int count;
1653 int idx;
1654 while (true)
1656 symtab_node *node;
1657 count = streamer_read_uhwi (ib);
1658 if (!count)
1659 break;
1660 idx = streamer_read_uhwi (ib);
1661 node = nodes[idx];
1662 while (count)
1664 input_ref (ib, node, nodes);
1665 count--;
1670 /* Input profile_info from IB. */
1671 static void
1672 input_profile_summary (class lto_input_block *ib,
1673 struct lto_file_decl_data *file_data)
1675 unsigned int runs = streamer_read_uhwi (ib);
1676 if (runs)
1678 file_data->profile_info.runs = runs;
1680 /* IPA-profile computes hot bb threshold based on cumulated
1681 whole program profile. We need to stream it down to ltrans. */
1682 if (flag_ltrans)
1683 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1688 /* Rescale profile summaries to the same number of runs in the whole unit. */
1690 static void
1691 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1693 struct lto_file_decl_data *file_data;
1694 unsigned int j;
1695 gcov_unsigned_t max_runs = 0;
1696 struct cgraph_node *node;
1697 struct cgraph_edge *edge;
1699 /* Find the unit with the maximal number of runs.  If we ever get serious about
1700 roundoff errors, we might also consider computing the least common
1701 multiple.  */
1702 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1703 if (max_runs < file_data->profile_info.runs)
1704 max_runs = file_data->profile_info.runs;
1706 if (!max_runs)
1707 return;
1709 /* Simple overflow check. We probably don't need to support that many train
1710 runs.  Such a large value probably implies data corruption anyway.  */
1711 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1713 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1714 INT_MAX / REG_BR_PROB_BASE);
1715 return;
1718 profile_info = XCNEW (gcov_summary);
1719 profile_info->runs = max_runs;
1721 /* If merging already happened at WPA time, we are done.  */
1722 if (flag_ltrans)
1723 return;
1725 /* Now compute count_materialization_scale of each node.
1726 During LTRANS we already have values of count_materialization_scale
1727 computed, so just update them. */
1728 FOR_EACH_FUNCTION (node)
1729 if (node->lto_file_data
1730 && node->lto_file_data->profile_info.runs)
1732 int scale;
1734 scale = RDIV (node->count_materialization_scale * max_runs,
1735 node->lto_file_data->profile_info.runs);
1736 node->count_materialization_scale = scale;
1737 if (scale < 0)
1738 fatal_error (input_location, "Profile information in %s corrupted",
1739 file_data->file_name);
1741 if (scale == REG_BR_PROB_BASE)
1742 continue;
1743 for (edge = node->callees; edge; edge = edge->next_callee)
1744 if (edge->count.ipa ().nonzero_p ())
1745 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1746 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1747 if (edge->count.ipa ().nonzero_p ())
1748 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1749 if (node->count.ipa ().nonzero_p ())
1750 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
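/* Illustrative arithmetic for the scaling above, assuming the node's
   count_materialization_scale starts out at REG_BR_PROB_BASE: if this unit
   was trained with runs == 2 while max_runs == 10, then
     scale = RDIV (REG_BR_PROB_BASE * 10, 2) = 5 * REG_BR_PROB_BASE
   and apply_scale (scale, REG_BR_PROB_BASE) multiplies each IPA count by 5,
   bringing all units to a common max_runs baseline.  */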
1754 /* Input and merge the symtab from each of the .o files passed to
1755 lto1. */
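/* For each input file this reads, in order, the profile summary, the
   LTO_section_symtab_nodes section with the nodes and edges, and the
   LTO_section_refs section with the IPA references; when running as
   ltrans the clone-info optimization summary is read too.  Finally the
   per-unit profile summaries are merged. */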
1757 void
1758 input_symtab (void)
1760 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1761 struct lto_file_decl_data *file_data;
1762 unsigned int j = 0;
1763 struct cgraph_node *node;
1765 while ((file_data = file_data_vec[j++]))
1767 const char *data;
1768 size_t len;
1769 class lto_input_block *ib;
1770 vec<symtab_node *> nodes;
1772 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1773 &data, &len);
1774 if (!ib)
1775 fatal_error (input_location,
1776 "cannot find LTO cgraph in %s", file_data->file_name);
1777 input_profile_summary (ib, file_data);
1778 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1779 nodes = input_cgraph_1 (file_data, ib);
1780 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1781 ib, data, len);
1783 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1784 &data, &len);
1785 if (!ib)
1786 fatal_error (input_location, "cannot find LTO section refs in %s",
1787 file_data->file_name);
1788 input_refs (ib, nodes);
1789 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1790 ib, data, len);
1791 if (flag_ltrans)
1792 input_cgraph_opt_summary (nodes);
1793 nodes.release ();
1796 merge_profile_summaries (file_data_vec);
1798 /* Clear out the aux field that was used to store enough state to
1799 tell which nodes should be overwritten. */
1800 FOR_EACH_FUNCTION (node)
1802 /* Some nodes may have been created by cgraph_node. This
1803 happens when the callgraph contains nested functions. If the
1804 node for the parent function was never emitted to the gimple
1805 file, cgraph_node will create a node for it when setting the
1806 context of the nested function. */
1807 if (node->lto_file_data)
1808 node->aux = NULL;
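/* Pretty-print the OpenMP "requires" clauses set in REQUIRES_MASK into BUF
   of SIZE bytes as a comma-separated list; for example, a mask with
   GOMP_REQUIRES_UNIFIED_ADDRESS and GOMP_REQUIRES_REVERSE_OFFLOAD set
   yields "unified_address, reverse_offload". */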
1812 static void
1813 omp_requires_to_name (char *buf, size_t size, HOST_WIDE_INT requires_mask)
1815 char *end = buf + size, *p = buf;
1816 if (requires_mask & GOMP_REQUIRES_UNIFIED_ADDRESS)
1817 p += snprintf (p, end - p, "unified_address");
1818 if (requires_mask & GOMP_REQUIRES_UNIFIED_SHARED_MEMORY)
1819 p += snprintf (p, end - p, "%sunified_shared_memory",
1820 (p == buf ? "" : ", "));
1821 if (requires_mask & GOMP_REQUIRES_SELF_MAPS)
1822 p += snprintf (p, end - p, "%sself_maps",
1823 (p == buf ? "" : ", "));
1824 if (requires_mask & GOMP_REQUIRES_REVERSE_OFFLOAD)
1825 p += snprintf (p, end - p, "%sreverse_offload",
1826 (p == buf ? "" : ", "));
1829 /* Input function/variable tables that will allow libgomp to look up offload
1830 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
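/* The offload table is a sequence of LTO_symtab_tags terminated by a zero
   tag: LTO_symtab_unavail_node introduces an offload function decl,
   LTO_symtab_variable an offload variable decl, LTO_symtab_indirect_function
   an indirect offload function decl, and LTO_symtab_edge is reused to carry
   the omp_requires mask of the unit. */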
1832 void
1833 input_offload_tables (bool do_force_output)
1835 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1836 struct lto_file_decl_data *file_data;
1837 unsigned int j = 0;
1838 const char *requires_fn = NULL;
1839 tree requires_decl = NULL_TREE;
1841 omp_requires_mask = (omp_requires) 0;
1843 while ((file_data = file_data_vec[j++]))
1845 const char *data;
1846 size_t len;
1847 class lto_input_block *ib
1848 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1849 &data, &len);
1850 if (!ib)
1851 continue;
1853 tree tmp_decl = NULL_TREE;
1854 enum LTO_symtab_tags tag
1855 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1856 while (tag)
1858 if (tag == LTO_symtab_unavail_node)
1860 tree fn_decl
1861 = lto_input_fn_decl_ref (ib, file_data);
1862 vec_safe_push (offload_funcs, fn_decl);
1864 /* Prevent IPA from removing fn_decl as unreachable, since there
1865 may be no refs from the parent function to child_fn in offload
1866 LTO mode. */
1867 if (do_force_output)
1868 cgraph_node::get (fn_decl)->mark_force_output ();
1869 tmp_decl = fn_decl;
1871 else if (tag == LTO_symtab_variable)
1873 tree var_decl
1874 = lto_input_var_decl_ref (ib, file_data);
1875 vec_safe_push (offload_vars, var_decl);
1877 /* Prevent IPA from removing var_decl as unused, since there
1878 may be no refs to var_decl in offload LTO mode. */
1879 if (do_force_output)
1880 varpool_node::get (var_decl)->force_output = 1;
1881 tmp_decl = var_decl;
1883 else if (tag == LTO_symtab_indirect_function)
1885 tree fn_decl
1886 = lto_input_fn_decl_ref (ib, file_data);
1887 vec_safe_push (offload_ind_funcs, fn_decl);
1889 /* Prevent IPA from removing fn_decl as unreachable, since there
1890 may be no refs from the parent function to child_fn in offload
1891 LTO mode. */
1892 if (do_force_output)
1893 cgraph_node::get (fn_decl)->mark_force_output ();
1894 tmp_decl = fn_decl;
1896 else if (tag == LTO_symtab_edge)
1898 static bool error_emitted = false;
1899 HOST_WIDE_INT val = streamer_read_hwi (ib);
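/* The first unit seen records its mask in omp_requires_mask; any later
   unit whose mask differs triggers the diagnostics below, which name the
   two functions or files involved. */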
1901 if (omp_requires_mask == 0)
1903 omp_requires_mask = (omp_requires) val;
1904 requires_decl = tmp_decl;
1905 requires_fn = file_data->file_name;
1907 else if (omp_requires_mask != val && !error_emitted)
1909 const char *fn1 = requires_fn;
1910 if (requires_decl != NULL_TREE)
1912 while (DECL_CONTEXT (requires_decl) != NULL_TREE
1913 && TREE_CODE (requires_decl) != TRANSLATION_UNIT_DECL)
1914 requires_decl = DECL_CONTEXT (requires_decl);
1915 if (requires_decl != NULL_TREE)
1916 fn1 = IDENTIFIER_POINTER (DECL_NAME (requires_decl));
1919 const char *fn2 = file_data->file_name;
1920 if (tmp_decl != NULL_TREE)
1922 while (DECL_CONTEXT (tmp_decl) != NULL_TREE
1923 && TREE_CODE (tmp_decl) != TRANSLATION_UNIT_DECL)
1924 tmp_decl = DECL_CONTEXT (tmp_decl);
1925 if (tmp_decl != NULL_TREE)
1926 fn2 = IDENTIFIER_POINTER (DECL_NAME (tmp_decl));
1928 if (fn1 == fn2)
1930 fn1 = requires_fn;
1931 fn2 = file_data->file_name;
1934 char buf1[sizeof ("unified_address, unified_shared_memory, "
1935 "self_maps, reverse_offload")];
1936 char buf2[sizeof ("unified_address, unified_shared_memory, "
1937 "self_maps, reverse_offload")];
1938 omp_requires_to_name (buf2, sizeof (buf2),
1939 val != OMP_REQUIRES_TARGET_USED
1940 ? val
1941 : (HOST_WIDE_INT) omp_requires_mask);
1942 if (val != OMP_REQUIRES_TARGET_USED
1943 && omp_requires_mask != OMP_REQUIRES_TARGET_USED)
1945 omp_requires_to_name (buf1, sizeof (buf1),
1946 omp_requires_mask);
1947 error ("OpenMP %<requires%> directive with non-identical "
1948 "clauses in multiple compilation units: %qs vs. "
1949 "%qs", buf1, buf2);
1950 inform (UNKNOWN_LOCATION, "%qs has %qs", fn1, buf1);
1951 inform (UNKNOWN_LOCATION, "%qs has %qs", fn2, buf2);
1953 else
1955 error ("OpenMP %<requires%> directive with %qs specified "
1956 "only in some compilation units", buf2);
1957 inform (UNKNOWN_LOCATION, "%qs has %qs",
1958 val != OMP_REQUIRES_TARGET_USED ? fn2 : fn1,
1959 buf2);
1960 inform (UNKNOWN_LOCATION, "but %qs has not",
1961 val != OMP_REQUIRES_TARGET_USED ? fn1 : fn2);
1963 error_emitted = true;
1966 else
1967 fatal_error (input_location,
1968 "invalid offload table in %s", file_data->file_name);
1970 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1973 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1974 ib, data, len);
1976 #ifdef ACCEL_COMPILER
1977 char *omp_requires_file = getenv ("GCC_OFFLOAD_OMP_REQUIRES_FILE");
1978 if (omp_requires_file == NULL || omp_requires_file[0] == '\0')
1979 fatal_error (input_location, "GCC_OFFLOAD_OMP_REQUIRES_FILE unset");
1980 FILE *f = fopen (omp_requires_file, "wb");
1981 if (!f)
1982 fatal_error (input_location, "Cannot open omp_requires file %qs",
1983 omp_requires_file);
1984 uint32_t req_mask = omp_requires_mask;
1985 fwrite (&req_mask, sizeof (req_mask), 1, f);
1986 fclose (f);
1987 #endif
1990 /* True when we need optimization summary for NODE. */
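/* That is the case for clones and former clones, and for nodes whose
   clone_info records replacement trees or parameter adjustments. */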
1992 static int
1993 output_cgraph_opt_summary_p (struct cgraph_node *node)
1995 if (node->clone_of || node->former_clone_of)
1996 return true;
1997 clone_info *info = clone_info::get (node);
1998 return info && (info->tree_map || info->param_adjustments);
2001 /* Output optimization summary for EDGE to OB. */
2002 static void
2003 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
2004 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
2008 /* Output optimization summary for NODE to OB. */
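/* The summary starts with a single bit saying whether NODE has parameter
   adjustments; if it does, the adjusted parameter descriptors,
   m_always_copy_start and the skip-return flag follow.  After that come the
   parameter replacement map entries and, for nodes inside the partition, a
   per-call-edge entry written by output_edge_opt_summary (which currently
   streams nothing). */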
2010 static void
2011 output_node_opt_summary (struct output_block *ob,
2012 struct cgraph_node *node,
2013 lto_symtab_encoder_t encoder)
2015 struct ipa_replace_map *map;
2016 int i;
2017 struct cgraph_edge *e;
2019 /* TODO: Should this code be moved to ipa-param-manipulation? */
2020 struct bitpack_d bp;
2021 bp = bitpack_create (ob->main_stream);
2022 clone_info *info = clone_info::get (node);
2024 bp_pack_value (&bp, (info && info->param_adjustments != NULL), 1);
2025 streamer_write_bitpack (&bp);
2026 if (ipa_param_adjustments *adjustments
2027 = info ? info->param_adjustments : NULL)
2029 streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
2030 ipa_adjusted_param *adj;
2031 FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
2033 bp = bitpack_create (ob->main_stream);
2034 bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
2035 bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
2036 bp_pack_value (&bp, adj->op, 2);
2037 bp_pack_value (&bp, adj->param_prefix_index, 2);
2038 bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
2039 bp_pack_value (&bp, adj->reverse, 1);
2040 bp_pack_value (&bp, adj->user_flag, 1);
2041 streamer_write_bitpack (&bp);
2042 if (adj->op == IPA_PARAM_OP_SPLIT
2043 || adj->op == IPA_PARAM_OP_NEW)
2045 stream_write_tree (ob, adj->type, true);
2046 if (adj->op == IPA_PARAM_OP_SPLIT)
2048 stream_write_tree (ob, adj->alias_ptr_type, true);
2049 streamer_write_uhwi (ob, adj->unit_offset);
2053 streamer_write_hwi (ob, adjustments->m_always_copy_start);
2054 bp = bitpack_create (ob->main_stream);
2055 bp_pack_value (&bp, info->param_adjustments->m_skip_return, 1);
2056 streamer_write_bitpack (&bp);
2059 streamer_write_uhwi (ob, info ? vec_safe_length (info->tree_map) : 0);
2060 if (info)
2061 FOR_EACH_VEC_SAFE_ELT (info->tree_map, i, map)
2063 streamer_write_uhwi (ob, map->parm_num);
2064 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
2065 stream_write_tree (ob, map->new_tree, true);
2068 if (lto_symtab_encoder_in_partition_p (encoder, node))
2070 for (e = node->callees; e; e = e->next_callee)
2071 output_edge_opt_summary (ob, e);
2072 for (e = node->indirect_calls; e; e = e->next_callee)
2073 output_edge_opt_summary (ob, e);
2077 /* Output optimization summaries stored in callgraph.
2078 At the moment it is the clone info structure. */
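/* The section records the number of nodes that have a summary, followed by
   pairs of the node's index in the symtab encoder and the summary itself. */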
2080 static void
2081 output_cgraph_opt_summary (void)
2083 int i, n_nodes;
2084 lto_symtab_encoder_t encoder;
2085 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2086 unsigned count = 0;
2088 ob->symbol = NULL;
2089 encoder = ob->decl_state->symtab_node_encoder;
2090 n_nodes = lto_symtab_encoder_size (encoder);
2091 for (i = 0; i < n_nodes; i++)
2093 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2094 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2095 if (cnode && output_cgraph_opt_summary_p (cnode))
2096 count++;
2098 streamer_write_uhwi (ob, count);
2099 for (i = 0; i < n_nodes; i++)
2101 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2102 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2103 if (cnode && output_cgraph_opt_summary_p (cnode))
2105 streamer_write_uhwi (ob, i);
2106 output_node_opt_summary (ob, cnode, encoder);
2109 produce_asm (ob);
2110 destroy_output_block (ob);
2113 /* Input optimization summary of EDGE. */
2115 static void
2116 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
2117 class lto_input_block *ib_main ATTRIBUTE_UNUSED)
2121 /* Input optimization summary of NODE. */
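/* This must read back exactly what output_node_opt_summary wrote, in the
   same order: the parameter-adjustment bit and descriptors, then the
   replacement map entries, then the per-edge summaries. */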
2123 static void
2124 input_node_opt_summary (struct cgraph_node *node,
2125 class lto_input_block *ib_main,
2126 class data_in *data_in)
2128 int i;
2129 int count;
2130 struct cgraph_edge *e;
2132 /* TODO: Should this code be moved to ipa-param-manipulation? */
2133 struct bitpack_d bp;
2134 bp = streamer_read_bitpack (ib_main);
2135 bool have_adjustments = bp_unpack_value (&bp, 1);
2136 clone_info *info = clone_info::get_create (node);
2138 if (have_adjustments)
2140 count = streamer_read_uhwi (ib_main);
2141 vec<ipa_adjusted_param, va_gc> *new_params = NULL;
2142 for (i = 0; i < count; i++)
2144 ipa_adjusted_param adj;
2145 memset (&adj, 0, sizeof (adj));
2146 bp = streamer_read_bitpack (ib_main);
2147 adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2148 adj.prev_clone_index
2149 = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2150 adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
2151 adj.param_prefix_index = bp_unpack_value (&bp, 2);
2152 adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
2153 adj.reverse = bp_unpack_value (&bp, 1);
2154 adj.user_flag = bp_unpack_value (&bp, 1);
2155 if (adj.op == IPA_PARAM_OP_SPLIT
2156 || adj.op == IPA_PARAM_OP_NEW)
2158 adj.type = stream_read_tree (ib_main, data_in);
2159 if (adj.op == IPA_PARAM_OP_SPLIT)
2161 adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
2162 adj.unit_offset = streamer_read_uhwi (ib_main);
2165 vec_safe_push (new_params, adj);
2167 int always_copy_start = streamer_read_hwi (ib_main);
2168 bp = streamer_read_bitpack (ib_main);
2169 bool skip_return = bp_unpack_value (&bp, 1);
2170 info->param_adjustments
2171 = (new (ggc_alloc <ipa_param_adjustments> ())
2172 ipa_param_adjustments (new_params, always_copy_start, skip_return));
2175 count = streamer_read_uhwi (ib_main);
2176 for (i = 0; i < count; i++)
2178 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2180 vec_safe_push (info->tree_map, map);
2181 map->parm_num = streamer_read_uhwi (ib_main);
2182 map->new_tree = stream_read_tree (ib_main, data_in);
2184 for (e = node->callees; e; e = e->next_callee)
2185 input_edge_opt_summary (e, ib_main);
2186 for (e = node->indirect_calls; e; e = e->next_callee)
2187 input_edge_opt_summary (e, ib_main);
2190 /* Read section in file FILE_DATA of length LEN with data DATA. */
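/* The section begins with an lto_function_header; the main stream holding
   the summaries starts at cfg_offset + cfg_size and the string table at
   main_offset + main_size, which is how the offsets below are computed. */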
2192 static void
2193 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2194 const char *data, size_t len,
2195 vec<symtab_node *> nodes)
2197 const struct lto_function_header *header =
2198 (const struct lto_function_header *) data;
2199 const int cfg_offset = sizeof (struct lto_function_header);
2200 const int main_offset = cfg_offset + header->cfg_size;
2201 const int string_offset = main_offset + header->main_size;
2202 class data_in *data_in;
2203 unsigned int i;
2204 unsigned int count;
2206 lto_input_block ib_main ((const char *) data + main_offset,
2207 header->main_size, file_data);
2209 data_in =
2210 lto_data_in_create (file_data, (const char *) data + string_offset,
2211 header->string_size, vNULL);
2212 count = streamer_read_uhwi (&ib_main);
2214 for (i = 0; i < count; i++)
2216 int ref = streamer_read_uhwi (&ib_main);
2217 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2218 &ib_main, data_in);
2220 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2221 len);
2222 lto_data_in_delete (data_in);
2225 /* Input optimization summary of cgraph. */
2227 static void
2228 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2230 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2231 struct lto_file_decl_data *file_data;
2232 unsigned int j = 0;
2234 while ((file_data = file_data_vec[j++]))
2236 size_t len;
2237 const char *data
2238 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2239 &len);
2240 if (data)
2241 input_cgraph_opt_section (file_data, data, len, nodes);