gcc/lto-streamer-in.cc
1 /* Read the GIMPLE representation from a file stream.
3 Copyright (C) 2009-2024 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45 #include "alloc-pool.h"
46 #include "toplev.h"
48 /* Allocator used to hold string slot entries for line map streaming. */
49 static struct object_allocator<struct string_slot> *string_slot_allocator;
51 /* The table to hold the file names. */
52 static hash_table<string_slot_hasher> *file_name_hash_table;
54 /* The table to hold the relative pathname prefixes. */
56 /* This obstack holds file names used in locators.  Line map data structures
57 point here, so it needs to stay allocated for as long as the line maps
58 exist. */
59 static struct obstack file_name_obstack;
61 /* Map a pair of nul terminated strings where the first one can be
62 pointer compared, but the second can't, to another string. */
63 struct string_pair_map
65 const char *str1;
66 const char *str2;
67 const char *str3;
68 hashval_t hash;
69 bool prefix;
72 /* Allocator used to hold string pair map entries for line map streaming. */
73 static struct object_allocator<struct string_pair_map>
74 *string_pair_map_allocator;
76 struct string_pair_map_hasher : nofree_ptr_hash <string_pair_map>
78 static inline hashval_t hash (const string_pair_map *);
79 static inline bool equal (const string_pair_map *, const string_pair_map *);
82 inline hashval_t
83 string_pair_map_hasher::hash (const string_pair_map *spm)
85 return spm->hash;
88 inline bool
89 string_pair_map_hasher::equal (const string_pair_map *spm1,
90 const string_pair_map *spm2)
92 return (spm1->hash == spm2->hash
93 && spm1->str1 == spm2->str1
94 && spm1->prefix == spm2->prefix
95 && strcmp (spm1->str2, spm2->str2) == 0);
98 /* The table to hold the pairs of pathnames and corresponding
99 resulting pathname. Used for both mapping of get_src_pwd ()
100 and recorded source working directory to relative path prefix
101 from current working directory to the recorded one, and for
102 mapping of that relative path prefix and some relative path
103 to those concatenated. */
104 static hash_table<string_pair_map_hasher> *path_name_pair_hash_table;
107 /* Check that tag ACTUAL has one of the given values.  NTAGS is the
108 number of valid tag values to check. */
110 void
111 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
113 va_list ap;
114 int i;
116 va_start (ap, ntags);
117 for (i = 0; i < ntags; i++)
118 if ((unsigned) actual == va_arg (ap, unsigned))
120 va_end (ap);
121 return;
124 va_end (ap);
125 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
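/* Editor's illustrative sketch (not part of the original file): a typical
   use of the variadic tag check above.  The accepted tags here are only an
   example; real callers pass whichever tags are legal at that point in the
   bytecode stream.  */
static void
tag_check_usage_sketch (class lto_input_block *ib)
{
  enum LTO_tags tag = streamer_read_record_start (ib);
  /* Accept a function record or the end-of-stream marker; any other tag
     makes lto_tag_check_set report a malformed bytecode stream.  */
  lto_tag_check_set (tag, 2, LTO_function, LTO_null);
}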
129 /* Read LENGTH bytes from input block IB into ADDR. */
131 void
132 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
134 size_t i;
135 unsigned char *const buffer = (unsigned char *) addr;
137 for (i = 0; i < length; i++)
138 buffer[i] = streamer_read_uchar (ib);
141 /* Compute the relative path to get to DATA_WD (absolute directory name)
142 from CWD (another absolute directory name). E.g. for
143 DATA_WD of "/tmp/foo/bar" and CWD of "/tmp/baz/qux" return
144 "../../foo/bar". Returned string should be freed by the caller.
145 Return NULL if absolute file name needs to be used. */
147 static char *
148 relative_path_prefix (const char *data_wd, const char *cwd)
150 const char *d = data_wd;
151 const char *c = cwd;
152 #ifdef HAVE_DOS_BASED_FILE_SYSTEM
153 if (d[1] == ':')
155 if (!IS_DIR_SEPARATOR (d[2]))
156 return NULL;
157 if (c[0] == d[0] && c[1] == ':' && IS_DIR_SEPARATOR (c[2]))
159 c += 3;
160 d += 3;
162 else
163 return NULL;
165 else if (c[1] == ':')
166 return NULL;
167 #endif
170 while (IS_DIR_SEPARATOR (*d))
171 d++;
172 while (IS_DIR_SEPARATOR (*c))
173 c++;
174 size_t i;
175 for (i = 0; c[i] && !IS_DIR_SEPARATOR (c[i]) && c[i] == d[i]; i++)
177 if ((c[i] == '\0' || IS_DIR_SEPARATOR (c[i]))
178 && (d[i] == '\0' || IS_DIR_SEPARATOR (d[i])))
180 c += i;
181 d += i;
182 if (*c == '\0' || *d == '\0')
183 break;
185 else
186 break;
188 while (1);
189 size_t num_up = 0;
192 while (IS_DIR_SEPARATOR (*c))
193 c++;
194 if (*c == '\0')
195 break;
196 num_up++;
197 while (*c && !IS_DIR_SEPARATOR (*c))
198 c++;
200 while (1);
201 while (IS_DIR_SEPARATOR (*d))
202 d++;
203 size_t len = strlen (d);
204 if (len == 0 && num_up == 0)
205 return xstrdup (".");
206 char *ret = XNEWVEC (char, num_up * 3 + len + 1);
207 char *p = ret;
208 for (; num_up; num_up--)
210 const char dir_up[3] = { '.', '.', DIR_SEPARATOR };
211 memcpy (p, dir_up, 3);
212 p += 3;
214 memcpy (p, d, len + 1);
215 return ret;
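/* Editor's illustrative sketch (not part of the original file): the same
   "../" prefix computation as relative_path_prefix above, written with
   std::string for POSIX-style paths only.  Assumes <string> and <vector>;
   unlike the function above it ignores DOS drive letters and obstacks.  */
static std::string
relative_path_sketch (const std::string &data_wd, const std::string &cwd)
{
  /* Split an absolute path into its components.  */
  auto split = [] (const std::string &s)
    {
      std::vector<std::string> parts;
      size_t pos = 0;
      while (pos < s.size ())
        {
          size_t next = s.find ('/', pos);
          if (next == std::string::npos)
            next = s.size ();
          if (next > pos)
            parts.push_back (s.substr (pos, next - pos));
          pos = next + 1;
        }
      return parts;
    };
  std::vector<std::string> d = split (data_wd), c = split (cwd);
  /* Skip the common leading components.  */
  size_t i = 0;
  while (i < d.size () && i < c.size () && d[i] == c[i])
    i++;
  /* One "../" per remaining CWD component, then the rest of DATA_WD;
     e.g. "/tmp/foo/bar" seen from "/tmp/baz/qux" gives "../../foo/bar".  */
  std::string ret;
  for (size_t j = i; j < c.size (); j++)
    ret += "../";
  for (size_t j = i; j < d.size (); j++)
    {
      ret += d[j];
      if (j + 1 < d.size ())
        ret += '/';
    }
  return ret.empty () ? "." : ret;
}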
218 /* Look up DATA_WD in hash table of relative prefixes. If found,
219 return relative path from CWD to DATA_WD from the hash table,
220 otherwise create it. */
222 static const char *
223 canon_relative_path_prefix (const char *data_wd, const char *cwd)
225 if (!IS_ABSOLUTE_PATH (data_wd) || !IS_ABSOLUTE_PATH (cwd))
226 return NULL;
228 if (!path_name_pair_hash_table)
230 path_name_pair_hash_table
231 = new hash_table<string_pair_map_hasher> (37);
232 string_pair_map_allocator
233 = new object_allocator <struct string_pair_map>
234 ("line map string pair map hash");
237 inchash::hash h;
238 h.add_ptr (cwd);
239 h.merge_hash (htab_hash_string (data_wd));
240 h.add_int (true);
242 string_pair_map s_slot;
243 s_slot.str1 = cwd;
244 s_slot.str2 = data_wd;
245 s_slot.str3 = NULL;
246 s_slot.hash = h.end ();
247 s_slot.prefix = true;
249 string_pair_map **slot
250 = path_name_pair_hash_table->find_slot (&s_slot, INSERT);
251 if (*slot == NULL)
253 /* Compute relative path from cwd directory to data_wd directory.
254 E.g. if cwd is /tmp/foo/bar and data_wd is /tmp/baz/qux ,
255 it will return ../../baz/qux . */
256 char *relative_path = relative_path_prefix (data_wd, cwd);
257 const char *relative = relative_path ? relative_path : data_wd;
258 size_t relative_len = strlen (relative);
259 gcc_assert (relative_len);
261 size_t data_wd_len = strlen (data_wd);
262 bool add_separator = false;
263 if (!IS_DIR_SEPARATOR (relative[relative_len - 1]))
264 add_separator = true;
266 size_t len = relative_len + 1 + data_wd_len + 1 + add_separator;
268 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len);
269 struct string_pair_map *new_slot
270 = string_pair_map_allocator->allocate ();
271 memcpy (saved_string, data_wd, data_wd_len + 1);
272 memcpy (saved_string + data_wd_len + 1, relative, relative_len);
273 if (add_separator)
274 saved_string[len - 2] = DIR_SEPARATOR;
275 saved_string[len - 1] = '\0';
276 new_slot->str1 = cwd;
277 new_slot->str2 = saved_string;
278 new_slot->str3 = saved_string + data_wd_len + 1;
279 if (relative_len == 1 && relative[0] == '.')
280 new_slot->str3 = NULL;
281 new_slot->hash = s_slot.hash;
282 new_slot->prefix = true;
283 *slot = new_slot;
284 free (relative_path);
285 return new_slot->str3;
287 else
289 string_pair_map *old_slot = *slot;
290 return old_slot->str3;
294 /* Look up the pair of RELATIVE_PREFIX and STRING strings in a hash table.
295 If found, return the concatenation of those from the hash table,
296 otherwise concatenate them. */
298 static const char *
299 canon_relative_file_name (const char *relative_prefix, const char *string)
301 inchash::hash h;
302 h.add_ptr (relative_prefix);
303 h.merge_hash (htab_hash_string (string));
305 string_pair_map s_slot;
306 s_slot.str1 = relative_prefix;
307 s_slot.str2 = string;
308 s_slot.str3 = NULL;
309 s_slot.hash = h.end ();
310 s_slot.prefix = false;
312 string_pair_map **slot
313 = path_name_pair_hash_table->find_slot (&s_slot, INSERT);
314 if (*slot == NULL)
316 size_t relative_prefix_len = strlen (relative_prefix);
317 size_t string_len = strlen (string);
318 size_t len = relative_prefix_len + string_len + 1;
320 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len);
321 struct string_pair_map *new_slot
322 = string_pair_map_allocator->allocate ();
323 memcpy (saved_string, relative_prefix, relative_prefix_len);
324 memcpy (saved_string + relative_prefix_len, string, string_len + 1);
325 new_slot->str1 = relative_prefix;
326 new_slot->str2 = saved_string + relative_prefix_len;
327 new_slot->str3 = saved_string;
328 new_slot->hash = s_slot.hash;
329 new_slot->prefix = false;
330 *slot = new_slot;
331 return new_slot->str3;
333 else
335 string_pair_map *old_slot = *slot;
336 return old_slot->str3;
340 /* Lookup STRING in file_name_hash_table. If found, return the existing
341 string, otherwise insert STRING as the canonical version.
342 If STRING is a relative pathname and RELATIVE_PREFIX is non-NULL, use
343 canon_relative_file_name instead. */
345 static const char *
346 canon_file_name (const char *relative_prefix, const char *string)
348 if (relative_prefix && !IS_ABSOLUTE_PATH (string))
349 return canon_relative_file_name (relative_prefix, string);
351 string_slot **slot;
352 struct string_slot s_slot;
353 size_t len = strlen (string);
355 s_slot.s = string;
356 s_slot.len = len;
358 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
359 if (*slot == NULL)
361 char *saved_string;
362 struct string_slot *new_slot;
364 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
365 new_slot = string_slot_allocator->allocate ();
366 memcpy (saved_string, string, len + 1);
367 new_slot->s = saved_string;
368 new_slot->len = len;
369 *slot = new_slot;
370 return saved_string;
372 else
374 struct string_slot *old_slot = *slot;
375 return old_slot->s;
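/* Editor's illustrative sketch (not part of the original file): the string
   interning pattern behind canon_file_name above, expressed with a
   std::unordered_set for brevity.  Assumes <string> and <unordered_set>.
   Node-based containers keep element addresses stable, so the returned
   pointer remains valid, much like the obstack-backed copy above.  */
static const char *
canon_string_sketch (const char *string)
{
  static std::unordered_set<std::string> interned;
  /* insert returns the already-stored element when STRING was seen before,
     so equal strings always canonicalize to the same storage.  */
  return interned.insert (string).first->c_str ();
}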
379 /* Pointer to currently alive instance of lto_location_cache. */
381 lto_location_cache *lto_location_cache::current_cache;
383 /* Sort locations in source order. Start with file from last application. */
385 int
386 lto_location_cache::cmp_loc (const void *pa, const void *pb)
388 const cached_location *a = ((const cached_location *)pa);
389 const cached_location *b = ((const cached_location *)pb);
390 const char *current_file = current_cache->current_file;
391 int current_line = current_cache->current_line;
393 if (a->file == current_file && b->file != current_file)
394 return -1;
395 if (a->file != current_file && b->file == current_file)
396 return 1;
397 if (a->file == current_file && b->file == current_file)
399 if (a->line == current_line && b->line != current_line)
400 return -1;
401 if (a->line != current_line && b->line == current_line)
402 return 1;
404 if (a->file != b->file)
405 return strcmp (a->file, b->file);
406 if (a->sysp != b->sysp)
407 return a->sysp ? 1 : -1;
408 if (a->line != b->line)
409 return a->line - b->line;
410 if (a->col != b->col)
411 return a->col - b->col;
412 if (a->discr != b->discr)
413 return a->discr - b->discr;
414 if ((a->block == NULL_TREE) != (b->block == NULL_TREE))
415 return a->block ? 1 : -1;
416 if (a->block)
418 if (BLOCK_NUMBER (a->block) < BLOCK_NUMBER (b->block))
419 return -1;
420 if (BLOCK_NUMBER (a->block) > BLOCK_NUMBER (b->block))
421 return 1;
423 return 0;
426 /* Apply all changes in location cache. Add locations into linemap and patch
427 trees. */
429 bool
430 lto_location_cache::apply_location_cache ()
432 static const char *prev_file;
433 if (!loc_cache.length ())
434 return false;
435 if (loc_cache.length () > 1)
436 loc_cache.qsort (cmp_loc);
438 for (unsigned int i = 0; i < loc_cache.length (); i++)
440 struct cached_location loc = loc_cache[i];
442 if (current_file != loc.file)
443 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
444 loc.sysp, loc.file, loc.line);
445 else if (current_line != loc.line)
447 int max = loc.col;
449 for (unsigned int j = i + 1; j < loc_cache.length (); j++)
450 if (loc.file != loc_cache[j].file
451 || loc.line != loc_cache[j].line)
452 break;
453 else if (max < loc_cache[j].col)
454 max = loc_cache[j].col;
455 linemap_line_start (line_table, loc.line, max + 1);
457 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
458 if (current_file != loc.file
459 || current_line != loc.line
460 || current_col != loc.col)
462 current_loc = linemap_position_for_column (line_table, loc.col);
463 if (loc.block)
464 current_loc = set_block (current_loc, loc.block);
465 if (loc.discr)
466 current_loc = location_with_discriminator (current_loc, loc.discr);
468 else if (current_block != loc.block)
470 if (loc.block)
471 current_loc = set_block (current_loc, loc.block);
472 else
473 current_loc = LOCATION_LOCUS (current_loc);
474 if (loc.discr)
475 current_loc = location_with_discriminator (current_loc, loc.discr);
477 else if (current_discr != loc.discr)
478 current_loc = location_with_discriminator (current_loc, loc.discr);
479 *loc.loc = current_loc;
480 current_line = loc.line;
481 prev_file = current_file = loc.file;
482 current_col = loc.col;
483 current_block = loc.block;
484 current_discr = loc.discr;
486 loc_cache.truncate (0);
487 accepted_length = 0;
488 return true;
491 /* Tree merging did not succeed; mark all changes in the cache as accepted. */
493 void
494 lto_location_cache::accept_location_cache ()
496 gcc_assert (current_cache == this);
497 accepted_length = loc_cache.length ();
500 /* Tree merging did succeed; throw away recent changes. */
502 void
503 lto_location_cache::revert_location_cache ()
505 loc_cache.truncate (accepted_length);
508 /* Read a location bitpack from bit pack BP and either update *LOC directly
509 or add it to the location cache. If IB is non-NULL, stream in a block
510 afterwards.
511 It is necessary to call apply_location_cache to get *LOC updated. */
513 void
514 lto_location_cache::input_location_and_block (location_t *loc,
515 struct bitpack_d *bp,
516 class lto_input_block *ib,
517 class data_in *data_in)
519 static const char *stream_file;
520 static int stream_line;
521 static int stream_col;
522 static bool stream_sysp;
523 static tree stream_block;
524 static unsigned stream_discr;
525 static const char *stream_relative_path_prefix;
527 gcc_assert (current_cache == this);
529 *loc = bp_unpack_int_in_range (bp, "location", 0,
530 RESERVED_LOCATION_COUNT + 1);
532 if (*loc < RESERVED_LOCATION_COUNT)
534 if (ib)
536 bool block_change = bp_unpack_value (bp, 1);
537 if (block_change)
538 stream_block = stream_read_tree (ib, data_in);
539 if (stream_block)
540 *loc = set_block (*loc, stream_block);
542 return;
545 bool file_change = (*loc == RESERVED_LOCATION_COUNT + 1);
546 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
547 ICE on it. */
548 *loc = RESERVED_LOCATION_COUNT;
549 bool line_change = bp_unpack_value (bp, 1);
550 bool column_change = bp_unpack_value (bp, 1);
551 bool discr_change = bp_unpack_value (bp, 1);
553 if (file_change)
555 bool pwd_change = bp_unpack_value (bp, 1);
556 if (pwd_change)
558 const char *pwd = bp_unpack_string (data_in, bp);
559 const char *src_pwd = get_src_pwd ();
560 if (strcmp (pwd, src_pwd) == 0)
561 stream_relative_path_prefix = NULL;
562 else
563 stream_relative_path_prefix
564 = canon_relative_path_prefix (pwd, src_pwd);
566 stream_file = canon_file_name (stream_relative_path_prefix,
567 bp_unpack_string (data_in, bp));
568 stream_sysp = bp_unpack_value (bp, 1);
571 if (line_change)
572 stream_line = bp_unpack_var_len_unsigned (bp);
574 if (column_change)
575 stream_col = bp_unpack_var_len_unsigned (bp);
577 if (discr_change)
578 stream_discr = bp_unpack_var_len_unsigned (bp);
580 tree block = NULL_TREE;
581 if (ib)
583 bool block_change = bp_unpack_value (bp, 1);
584 if (block_change)
585 stream_block = stream_read_tree (ib, data_in);
586 block = stream_block;
589 /* This optimization saves location cache operations during gimple
590 streaming. */
592 if (current_file == stream_file
593 && current_line == stream_line
594 && current_col == stream_col
595 && current_sysp == stream_sysp
596 && current_discr == stream_discr)
598 if (current_block == block)
599 *loc = current_loc;
600 else if (block)
601 *loc = set_block (current_loc, block);
602 else
603 *loc = LOCATION_LOCUS (current_loc);
604 return;
607 struct cached_location entry
608 = {stream_file, loc, stream_line, stream_col, stream_sysp, block, stream_discr};
609 loc_cache.safe_push (entry);
612 /* Read a location bitpack from bit pack BP and either update *LOC directly
613 or add it to the location cache.
614 It is necessary to call apply_location_cache to get *LOC updated. */
616 void
617 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
618 class data_in *data_in)
620 return input_location_and_block (loc, bp, NULL, data_in);
623 /* Read a location bitpack from bit pack BP and either update *LOC directly
624 or add it to the location cache.
625 It is necessary to call apply_location_cache to get *LOC updated. */
627 void
628 lto_input_location (location_t *loc, struct bitpack_d *bp,
629 class data_in *data_in)
631 data_in->location_cache.input_location (loc, bp, data_in);
634 /* Read a reference to a tree node from DATA_IN using input block IB.
635 TAG is the expected node that should be found in IB. If TAG belongs
636 to one of the indexable trees, expect to read a reference index to
637 be looked up in one of the symbol tables; otherwise read the physical
638 representation of the tree using stream_read_tree. FN is the
639 function scope for the read tree. */
641 tree
642 lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
643 struct function *fn, enum LTO_tags tag)
645 unsigned HOST_WIDE_INT ix_u;
646 tree result = NULL_TREE;
648 if (tag == LTO_ssa_name_ref)
650 ix_u = streamer_read_uhwi (ib);
651 result = (*SSANAMES (fn))[ix_u];
653 else
655 gcc_checking_assert (tag == LTO_global_stream_ref);
656 ix_u = streamer_read_uhwi (ib);
657 result = (*data_in->file_data->current_decl_state
658 ->streams[LTO_DECL_STREAM])[ix_u];
661 gcc_assert (result);
663 return result;
666 /* Read VAR_DECL reference to DATA from IB. */
668 tree
669 lto_input_var_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
671 unsigned int ix_u = streamer_read_uhwi (ib);
672 tree result = (*file_data->current_decl_state
673 ->streams[LTO_DECL_STREAM])[ix_u];
674 gcc_assert (VAR_P (result));
675 return result;
678 /* Read FUNCTION_DECL reference to DATA from IB. */
680 tree
681 lto_input_fn_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
683 unsigned int ix_u = streamer_read_uhwi (ib);
684 tree result = (*file_data->current_decl_state
685 ->streams[LTO_DECL_STREAM])[ix_u];
686 gcc_assert (TREE_CODE (result) == FUNCTION_DECL);
687 return result;
691 /* Read and return a double-linked list of catch handlers from input
692 block IB, using descriptors in DATA_IN. */
694 static struct eh_catch_d *
695 lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
696 eh_catch *last_p)
698 eh_catch first;
699 enum LTO_tags tag;
701 *last_p = first = NULL;
702 tag = streamer_read_record_start (ib);
703 while (tag)
705 tree list;
706 eh_catch n;
708 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
710 /* Read the catch node. */
711 n = ggc_cleared_alloc<eh_catch_d> ();
712 n->type_list = stream_read_tree (ib, data_in);
713 n->filter_list = stream_read_tree (ib, data_in);
714 n->label = stream_read_tree (ib, data_in);
716 /* Register all the types in N->FILTER_LIST. */
717 for (list = n->filter_list; list; list = TREE_CHAIN (list))
718 add_type_for_runtime (TREE_VALUE (list));
720 /* Chain N to the end of the list. */
721 if (*last_p)
722 (*last_p)->next_catch = n;
723 n->prev_catch = *last_p;
724 *last_p = n;
726 /* Set the head of the list the first time through the loop. */
727 if (first == NULL)
728 first = n;
730 tag = streamer_read_record_start (ib);
733 return first;
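/* Editor's illustrative sketch (not part of the original file): the
   append-at-tail idiom used by lto_input_eh_catch_list above, shown on a
   self-contained doubly linked list type.  */
struct chain_sketch_node
{
  chain_sketch_node *prev_catch, *next_catch;
};

static chain_sketch_node *
chain_append_sketch (chain_sketch_node *first, chain_sketch_node **last_p,
                     chain_sketch_node *n)
{
  /* Chain N to the end of the list.  */
  if (*last_p)
    (*last_p)->next_catch = n;
  n->prev_catch = *last_p;
  *last_p = n;
  /* Set the head of the list the first time through.  */
  if (first == NULL)
    first = n;
  return first;
}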
737 /* Read and return EH region IX from input block IB, using descriptors
738 in DATA_IN. */
740 static eh_region
741 input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
743 enum LTO_tags tag;
744 eh_region r;
746 /* Read the region header. */
747 tag = streamer_read_record_start (ib);
748 if (tag == LTO_null)
749 return NULL;
751 r = ggc_cleared_alloc<eh_region_d> ();
752 r->index = streamer_read_hwi (ib);
754 gcc_assert (r->index == ix);
756 /* Read all the region pointers as region numbers. We'll fix up
757 the pointers once the whole array has been read. */
758 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
759 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
760 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
762 switch (tag)
764 case LTO_ert_cleanup:
765 r->type = ERT_CLEANUP;
766 break;
768 case LTO_ert_try:
770 struct eh_catch_d *last_catch;
771 r->type = ERT_TRY;
772 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
773 &last_catch);
774 r->u.eh_try.last_catch = last_catch;
775 break;
778 case LTO_ert_allowed_exceptions:
780 tree l;
782 r->type = ERT_ALLOWED_EXCEPTIONS;
783 r->u.allowed.type_list = stream_read_tree (ib, data_in);
784 r->u.allowed.label = stream_read_tree (ib, data_in);
785 r->u.allowed.filter = streamer_read_uhwi (ib);
787 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
788 add_type_for_runtime (TREE_VALUE (l));
790 break;
792 case LTO_ert_must_not_throw:
794 r->type = ERT_MUST_NOT_THROW;
795 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
796 bitpack_d bp = streamer_read_bitpack (ib);
797 stream_input_location (&r->u.must_not_throw.failure_loc,
798 &bp, data_in);
800 break;
802 default:
803 gcc_unreachable ();
806 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
808 return r;
812 /* Read and return EH landing pad IX from input block IB, using descriptors
813 in DATA_IN. */
815 static eh_landing_pad
816 input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
818 enum LTO_tags tag;
819 eh_landing_pad lp;
821 /* Read the landing pad header. */
822 tag = streamer_read_record_start (ib);
823 if (tag == LTO_null)
824 return NULL;
826 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
828 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
829 lp->index = streamer_read_hwi (ib);
830 gcc_assert (lp->index == ix);
831 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
832 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
833 lp->post_landing_pad = stream_read_tree (ib, data_in);
835 return lp;
839 /* After reading the EH regions, pointers to peer and children regions
840 are region numbers. This converts all these region numbers into
841 real pointers into the rematerialized regions for FN. ROOT_REGION
842 is the region number for the root EH region in FN. */
844 static void
845 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
847 unsigned i;
848 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
849 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
850 eh_region r;
851 eh_landing_pad lp;
853 gcc_assert (eh_array && lp_array);
855 gcc_assert (root_region >= 0);
856 fn->eh->region_tree = (*eh_array)[root_region];
858 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
859 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
861 /* Convert all the index numbers stored in pointer fields into
862 pointers to the corresponding slots in the EH region array. */
863 FOR_EACH_VEC_ELT (*eh_array, i, r)
865 /* The array may contain NULL regions. */
866 if (r == NULL)
867 continue;
869 gcc_assert (i == (unsigned) r->index);
870 FIXUP_EH_REGION (r->outer);
871 FIXUP_EH_REGION (r->inner);
872 FIXUP_EH_REGION (r->next_peer);
873 FIXUP_EH_LP (r->landing_pads);
876 /* Convert all the index numbers stored in pointer fields into
877 pointers to the corresponding slots in the EH landing pad array. */
878 FOR_EACH_VEC_ELT (*lp_array, i, lp)
880 /* The array may contain NULL landing pads. */
881 if (lp == NULL)
882 continue;
884 gcc_assert (i == (unsigned) lp->index);
885 FIXUP_EH_LP (lp->next_lp);
886 FIXUP_EH_REGION (lp->region);
889 #undef FIXUP_EH_REGION
890 #undef FIXUP_EH_LP
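/* Editor's illustrative sketch (not part of the original file): the
   index-to-pointer fixup pattern used above, on a self-contained type.
   Assumes <vector> and <cstdint>.  While streaming, cross references are
   written as array indices cast to pointers; once the whole array has been
   materialized, every stored index is replaced by the real pointer, which
   is what the FIXUP_EH_REGION and FIXUP_EH_LP macros do.  */
struct fixup_sketch_node
{
  fixup_sketch_node *next;   /* Initially holds an index, not a pointer.  */
};

static void
fixup_sketch (std::vector<fixup_sketch_node *> &array)
{
  for (fixup_sketch_node *n : array)
    if (n)
      /* Reinterpret the stored index and look up the real node.  */
      n->next = array[(intptr_t) n->next];
}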
894 /* Initialize EH support. */
896 void
897 lto_init_eh (void)
899 static bool eh_initialized_p = false;
901 if (eh_initialized_p)
902 return;
904 /* Contrary to most other FEs, we only initialize EH support when at
905 least one of the files in the set contains exception regions in
906 it. Since this happens much later than the call to init_eh in
907 lang_dependent_init, we have to set flag_exceptions and call
908 init_eh again to initialize the EH tables. */
909 flag_exceptions = 1;
910 init_eh ();
912 eh_initialized_p = true;
916 /* Read the exception table for FN from IB using the data descriptors
917 in DATA_IN. */
919 static void
920 input_eh_regions (class lto_input_block *ib, class data_in *data_in,
921 struct function *fn)
923 HOST_WIDE_INT i, root_region, len;
924 enum LTO_tags tag;
926 tag = streamer_read_record_start (ib);
927 if (tag == LTO_null)
928 return;
930 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
932 gcc_assert (fn->eh);
934 root_region = streamer_read_hwi (ib);
935 gcc_assert (root_region == (int) root_region);
937 /* Read the EH region array. */
938 len = streamer_read_hwi (ib);
939 gcc_assert (len == (int) len);
940 if (len > 0)
942 vec_safe_grow_cleared (fn->eh->region_array, len, true);
943 for (i = 0; i < len; i++)
945 eh_region r = input_eh_region (ib, data_in, i);
946 (*fn->eh->region_array)[i] = r;
950 /* Read the landing pads. */
951 len = streamer_read_hwi (ib);
952 gcc_assert (len == (int) len);
953 if (len > 0)
955 vec_safe_grow_cleared (fn->eh->lp_array, len, true);
956 for (i = 0; i < len; i++)
958 eh_landing_pad lp = input_eh_lp (ib, data_in, i);
959 (*fn->eh->lp_array)[i] = lp;
963 /* Read the runtime type data. */
964 len = streamer_read_hwi (ib);
965 gcc_assert (len == (int) len);
966 if (len > 0)
968 vec_safe_grow_cleared (fn->eh->ttype_data, len, true);
969 for (i = 0; i < len; i++)
971 tree ttype = stream_read_tree (ib, data_in);
972 (*fn->eh->ttype_data)[i] = ttype;
976 /* Read the table of action chains. */
977 len = streamer_read_hwi (ib);
978 gcc_assert (len == (int) len);
979 if (len > 0)
981 if (targetm.arm_eabi_unwinder)
983 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len, true);
984 for (i = 0; i < len; i++)
986 tree t = stream_read_tree (ib, data_in);
987 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
990 else
992 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len, true);
993 for (i = 0; i < len; i++)
995 uchar c = streamer_read_uchar (ib);
996 (*fn->eh->ehspec_data.other)[i] = c;
1001 /* Reconstruct the EH region tree by fixing up the peer/children
1002 pointers. */
1003 fixup_eh_region_pointers (fn, root_region);
1005 tag = streamer_read_record_start (ib);
1006 lto_tag_check_range (tag, LTO_null, LTO_null);
1010 /* Make a new basic block with index INDEX in function FN. */
1012 static basic_block
1013 make_new_block (struct function *fn, unsigned int index)
1015 basic_block bb = alloc_block ();
1016 bb->index = index;
1017 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
1018 n_basic_blocks_for_fn (fn)++;
1019 return bb;
1023 /* Read the CFG for function FN from input block IB. */
1025 static void
1026 input_cfg (class lto_input_block *ib, class data_in *data_in,
1027 struct function *fn)
1029 unsigned int bb_count;
1030 basic_block p_bb;
1031 unsigned int i;
1032 int index;
1033 bool full_profile = false;
1035 init_empty_tree_cfg_for_function (fn);
1037 profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
1038 PROFILE_LAST);
1040 bb_count = streamer_read_uhwi (ib);
1042 last_basic_block_for_fn (fn) = bb_count;
1043 if (bb_count > basic_block_info_for_fn (fn)->length ())
1044 vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count, true);
1046 if (bb_count > label_to_block_map_for_fn (fn)->length ())
1047 vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count, true);
1049 index = streamer_read_hwi (ib);
1050 while (index != -1)
1052 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
1053 unsigned int edge_count;
1055 if (bb == NULL)
1056 bb = make_new_block (fn, index);
1058 edge_count = streamer_read_uhwi (ib);
1060 /* Connect up the CFG. */
1061 for (i = 0; i < edge_count; i++)
1063 bitpack_d bp = streamer_read_bitpack (ib);
1064 unsigned int dest_index = bp_unpack_var_len_unsigned (&bp);
1065 unsigned int edge_flags = bp_unpack_var_len_unsigned (&bp);
1066 basic_block dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
1068 if (dest == NULL)
1069 dest = make_new_block (fn, dest_index);
1071 edge e = make_edge (bb, dest, edge_flags);
1072 data_in->location_cache.input_location_and_block (&e->goto_locus,
1073 &bp, ib, data_in);
1074 e->probability = profile_probability::stream_in (ib);
1075 if (!e->probability.initialized_p ())
1076 full_profile = false;
1080 index = streamer_read_hwi (ib);
1083 p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
1084 index = streamer_read_hwi (ib);
1085 while (index != -1)
1087 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
1088 bb->prev_bb = p_bb;
1089 p_bb->next_bb = bb;
1090 p_bb = bb;
1091 index = streamer_read_hwi (ib);
1094 /* ??? The cfgloop interface is tied to cfun. */
1095 gcc_assert (cfun == fn);
1097 /* Input the loop tree. */
1098 unsigned n_loops = streamer_read_uhwi (ib);
1099 if (n_loops == 0)
1100 return;
1102 struct loops *loops = ggc_cleared_alloc<struct loops> ();
1103 init_loops_structure (fn, loops, n_loops);
1104 set_loops_for_fn (fn, loops);
1106 /* Input each loop and associate it with its loop header so
1107 flow_loops_find can rebuild the loop tree. */
1108 for (unsigned i = 1; i < n_loops; ++i)
1110 int header_index = streamer_read_hwi (ib);
1111 if (header_index == -1)
1113 loops->larray->quick_push (NULL);
1114 continue;
1117 class loop *loop = alloc_loop ();
1118 loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
1119 loop->header->loop_father = loop;
1121 /* Read everything copy_loop_info copies. */
1122 loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
1123 loop->any_upper_bound = streamer_read_hwi (ib);
1124 if (loop->any_upper_bound)
1125 loop->nb_iterations_upper_bound
1126 = bound_wide_int::from (streamer_read_widest_int (ib), SIGNED);
1127 loop->any_likely_upper_bound = streamer_read_hwi (ib);
1128 if (loop->any_likely_upper_bound)
1129 loop->nb_iterations_likely_upper_bound
1130 = bound_wide_int::from (streamer_read_widest_int (ib), SIGNED);
1131 loop->any_estimate = streamer_read_hwi (ib);
1132 if (loop->any_estimate)
1133 loop->nb_iterations_estimate
1134 = bound_wide_int::from (streamer_read_widest_int (ib), SIGNED);
1136 /* Read OMP SIMD related info. */
1137 loop->safelen = streamer_read_hwi (ib);
1138 loop->unroll = streamer_read_hwi (ib);
1139 loop->owned_clique = streamer_read_hwi (ib);
1140 loop->dont_vectorize = streamer_read_hwi (ib);
1141 loop->force_vectorize = streamer_read_hwi (ib);
1142 loop->finite_p = streamer_read_hwi (ib);
1143 loop->simduid = stream_read_tree (ib, data_in);
1145 place_new_loop (fn, loop);
1147 /* flow_loops_find doesn't like loops not in the tree, hook them
1148 all as siblings of the tree root temporarily. */
1149 flow_loop_tree_node_add (loops->tree_root, loop);
1152 /* Rebuild the loop tree. */
1153 flow_loops_find (loops);
1154 cfun->cfg->full_profile = full_profile;
1158 /* Read the SSA names array for function FN from DATA_IN using input
1159 block IB. */
1161 static void
1162 input_ssa_names (class lto_input_block *ib, class data_in *data_in,
1163 struct function *fn)
1165 unsigned int i, size;
1167 size = streamer_read_uhwi (ib);
1168 init_tree_ssa (fn, size);
1169 cfun->gimple_df->in_ssa_p = true;
1170 init_ssa_operands (fn);
1172 i = streamer_read_uhwi (ib);
1173 while (i)
1175 tree ssa_name, name;
1176 bool is_default_def;
1178 /* Skip over the elements that had been freed. */
1179 while (SSANAMES (fn)->length () < i)
1180 SSANAMES (fn)->quick_push (NULL_TREE);
1182 is_default_def = (streamer_read_uchar (ib) != 0);
1183 name = stream_read_tree (ib, data_in);
1184 ssa_name = make_ssa_name_fn (fn, name, NULL);
1186 if (is_default_def)
1188 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
1189 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
1192 i = streamer_read_uhwi (ib);
1197 /* Go through all NODE edges and fixup call_stmt pointers
1198 so they point to STMTS. */
1200 static void
1201 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
1202 struct function *fn)
1204 #define STMT_UID_NOT_IN_RANGE(uid) \
1205 (gimple_stmt_max_uid (fn) < uid || uid == 0)
1207 struct cgraph_edge *cedge;
1208 struct ipa_ref *ref = NULL;
1209 unsigned int i;
1211 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
1213 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
1214 fatal_error (input_location,
1215 "Cgraph edge statement index out of range");
1216 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
1217 cedge->lto_stmt_uid = 0;
1218 if (!cedge->call_stmt)
1219 fatal_error (input_location,
1220 "Cgraph edge statement index not found");
1222 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
1224 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
1225 fatal_error (input_location,
1226 "Cgraph edge statement index out of range");
1227 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
1228 cedge->lto_stmt_uid = 0;
1229 if (!cedge->call_stmt)
1230 fatal_error (input_location, "Cgraph edge statement index not found");
1232 for (i = 0; node->iterate_reference (i, ref); i++)
1233 if (ref->lto_stmt_uid)
1235 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
1236 fatal_error (input_location,
1237 "Reference statement index out of range");
1238 ref->stmt = stmts[ref->lto_stmt_uid - 1];
1239 ref->lto_stmt_uid = 0;
1240 if (!ref->stmt)
1241 fatal_error (input_location, "Reference statement index not found");
1246 /* Fixup call_stmt pointers in NODE and all clones. */
1248 static void
1249 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
1251 struct cgraph_node *node;
1252 struct function *fn;
1254 while (orig->clone_of)
1255 orig = orig->clone_of;
1256 fn = DECL_STRUCT_FUNCTION (orig->decl);
1258 if (!orig->thunk)
1259 fixup_call_stmt_edges_1 (orig, stmts, fn);
1260 if (orig->clones)
1261 for (node = orig->clones; node != orig;)
1263 if (!node->thunk)
1264 fixup_call_stmt_edges_1 (node, stmts, fn);
1265 if (node->clones)
1266 node = node->clones;
1267 else if (node->next_sibling_clone)
1268 node = node->next_sibling_clone;
1269 else
1271 while (node != orig && !node->next_sibling_clone)
1272 node = node->clone_of;
1273 if (node != orig)
1274 node = node->next_sibling_clone;
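/* Editor's illustrative sketch (not part of the original file): the
   stack-less pre-order walk used by fixup_call_stmt_edges above, written
   for a generic first-child / next-sibling / parent tree.  */
struct clone_sketch_node
{
  clone_sketch_node *clones;              /* First child.  */
  clone_sketch_node *next_sibling_clone;  /* Next sibling.  */
  clone_sketch_node *clone_of;            /* Parent.  */
};

static void
walk_clones_sketch (clone_sketch_node *root,
                    void (*visit) (clone_sketch_node *))
{
  visit (root);
  if (!root->clones)
    return;
  clone_sketch_node *node = root->clones;
  while (node != root)
    {
      visit (node);
      if (node->clones)
        node = node->clones;                  /* Descend.  */
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;      /* Advance.  */
      else
        {
          /* Climb until an ancestor has an unvisited sibling.  */
          while (node != root && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != root)
            node = node->next_sibling_clone;
        }
    }
}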
1280 /* Input the base body of struct function FN from DATA_IN
1281 using input block IB. */
1283 static void
1284 input_struct_function_base (struct function *fn, class data_in *data_in,
1285 class lto_input_block *ib)
1287 struct bitpack_d bp;
1288 int len;
1290 /* Read the static chain and non-local goto save area. */
1291 fn->static_chain_decl = stream_read_tree (ib, data_in);
1292 fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
1294 /* Read all the local symbols. */
1295 len = streamer_read_hwi (ib);
1296 if (len > 0)
1298 int i;
1299 vec_safe_grow_cleared (fn->local_decls, len, true);
1300 for (i = 0; i < len; i++)
1302 tree t = stream_read_tree (ib, data_in);
1303 (*fn->local_decls)[i] = t;
1307 /* Input the current IL state of the function. */
1308 fn->curr_properties = streamer_read_uhwi (ib);
1310 /* Read all the attributes for FN. */
1311 bp = streamer_read_bitpack (ib);
1312 fn->is_thunk = bp_unpack_value (&bp, 1);
1313 fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
1314 fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
1315 fn->returns_struct = bp_unpack_value (&bp, 1);
1316 fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
1317 fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
1318 fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1319 fn->after_inlining = bp_unpack_value (&bp, 1);
1320 fn->stdarg = bp_unpack_value (&bp, 1);
1321 fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1322 fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1323 fn->calls_alloca = bp_unpack_value (&bp, 1);
1324 fn->calls_setjmp = bp_unpack_value (&bp, 1);
1325 fn->calls_eh_return = bp_unpack_value (&bp, 1);
1326 fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1327 fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1328 fn->has_musttail = bp_unpack_value (&bp, 1);
1329 fn->has_unroll = bp_unpack_value (&bp, 1);
1330 fn->assume_function = bp_unpack_value (&bp, 1);
1331 fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1332 fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1333 fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1335 /* Input the function start and end loci. */
1336 stream_input_location (&fn->function_start_locus, &bp, data_in);
1337 stream_input_location (&fn->function_end_locus, &bp, data_in);
1339 /* Restore the instance discriminators if present. */
1340 int instance_number = bp_unpack_value (&bp, 1);
1341 if (instance_number)
1343 instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
1344 maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
1348 /* Read a chain of tree nodes from input block IB. DATA_IN contains
1349 tables and descriptors for the file being read. */
1351 static tree
1352 streamer_read_chain (class lto_input_block *ib, class data_in *data_in)
1354 tree first, prev, curr;
1356 /* The chain is written as NULL terminated list of trees. */
1357 first = prev = NULL_TREE;
1360 curr = stream_read_tree (ib, data_in);
1361 if (prev)
1362 TREE_CHAIN (prev) = curr;
1363 else
1364 first = curr;
1366 prev = curr;
1368 while (curr);
1370 return first;
1373 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1375 static void
1376 input_function (tree fn_decl, class data_in *data_in,
1377 class lto_input_block *ib, class lto_input_block *ib_cfg,
1378 cgraph_node *node)
1380 struct function *fn;
1381 enum LTO_tags tag;
1382 gimple **stmts;
1383 basic_block bb;
1385 tag = streamer_read_record_start (ib);
1386 lto_tag_check (tag, LTO_function);
1388 /* Read decls for parameters and args. */
1389 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1390 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1392 /* Read debug args if available. */
1393 unsigned n_debugargs = streamer_read_uhwi (ib);
1394 if (n_debugargs)
1396 vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
1397 vec_safe_grow (*debugargs, n_debugargs, true);
1398 for (unsigned i = 0; i < n_debugargs; ++i)
1399 (**debugargs)[i] = stream_read_tree (ib, data_in);
1402 /* Read the tree of lexical scopes for the function. */
1403 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1404 unsigned block_leaf_count = streamer_read_uhwi (ib);
1405 while (block_leaf_count--)
1406 stream_read_tree (ib, data_in);
1408 if (!streamer_read_uhwi (ib))
1409 return;
1411 push_struct_function (fn_decl);
1412 fn = DECL_STRUCT_FUNCTION (fn_decl);
1414 gimple_register_cfg_hooks ();
1416 input_struct_function_base (fn, data_in, ib);
1417 input_cfg (ib_cfg, data_in, fn);
1419 /* Read all the SSA names. */
1420 input_ssa_names (ib, data_in, fn);
1422 /* Read the exception handling regions in the function. */
1423 input_eh_regions (ib, data_in, fn);
1425 gcc_assert (DECL_INITIAL (fn_decl));
1426 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1428 /* Read all the basic blocks. */
1429 tag = streamer_read_record_start (ib);
1430 while (tag)
1432 input_bb (ib, tag, data_in, fn,
1433 node->count_materialization_scale);
1434 tag = streamer_read_record_start (ib);
1437 /* Finalize gimple_location/gimple_block of stmts and phis. */
1438 data_in->location_cache.apply_location_cache ();
1440 /* Fix up the call statements that are mentioned in the callgraph
1441 edges. */
1442 set_gimple_stmt_max_uid (cfun, 0);
1443 FOR_ALL_BB_FN (bb, cfun)
1445 gimple_stmt_iterator gsi;
1446 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1448 gimple *stmt = gsi_stmt (gsi);
1449 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1451 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1453 gimple *stmt = gsi_stmt (gsi);
1454 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1457 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1458 FOR_ALL_BB_FN (bb, cfun)
1460 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1461 while (!gsi_end_p (bsi))
1463 gimple *stmt = gsi_stmt (bsi);
1464 gsi_next (&bsi);
1465 stmts[gimple_uid (stmt)] = stmt;
1467 bsi = gsi_start_bb (bb);
1468 while (!gsi_end_p (bsi))
1470 gimple *stmt = gsi_stmt (bsi);
1471 bool remove = false;
1472 /* If we're recompiling LTO objects with debug stmts but
1473 we're not supposed to have debug stmts, remove them now.
1474 We can't remove them earlier because this would cause uid
1475 mismatches in fixups, but we can do it at this point, as
1476 long as debug stmts don't require fixups.
1477 Similarly remove all IFN_*SAN_* internal calls */
1478 if (!flag_wpa)
1480 if (is_gimple_debug (stmt)
1481 && (gimple_debug_nonbind_marker_p (stmt)
1482 ? !MAY_HAVE_DEBUG_MARKER_STMTS
1483 : !MAY_HAVE_DEBUG_BIND_STMTS))
1484 remove = true;
1485 /* In case the linemap overflows locations can be dropped
1486 to zero. Thus do not keep nonsensical inline entry markers
1487 we'd later ICE on. */
1488 tree block;
1489 if (gimple_debug_inline_entry_p (stmt)
1490 && (((block = gimple_block (stmt))
1491 && !inlined_function_outer_scope_p (block))
1492 || !debug_inline_points))
1493 remove = true;
1494 if (is_gimple_call (stmt)
1495 && gimple_call_internal_p (stmt))
1497 bool replace = false;
1498 switch (gimple_call_internal_fn (stmt))
1500 case IFN_UBSAN_NULL:
1501 if ((flag_sanitize
1502 & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1503 replace = true;
1504 break;
1505 case IFN_UBSAN_BOUNDS:
1506 if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1507 replace = true;
1508 break;
1509 case IFN_UBSAN_VPTR:
1510 if ((flag_sanitize & SANITIZE_VPTR) == 0)
1511 replace = true;
1512 break;
1513 case IFN_UBSAN_OBJECT_SIZE:
1514 if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1515 replace = true;
1516 break;
1517 case IFN_UBSAN_PTR:
1518 if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
1519 replace = true;
1520 break;
1521 case IFN_ASAN_MARK:
1522 if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1523 replace = true;
1524 break;
1525 case IFN_TSAN_FUNC_EXIT:
1526 if ((flag_sanitize & SANITIZE_THREAD) == 0)
1527 replace = true;
1528 break;
1529 default:
1530 break;
1532 if (replace)
1534 gimple_call_set_internal_fn (as_a <gcall *> (stmt),
1535 IFN_NOP);
1536 update_stmt (stmt);
1540 if (remove)
1542 gimple_stmt_iterator gsi = bsi;
1543 gsi_next (&bsi);
1544 unlink_stmt_vdef (stmt);
1545 release_defs (stmt);
1546 gsi_remove (&gsi, true);
1548 else
1550 gsi_next (&bsi);
1551 stmts[gimple_uid (stmt)] = stmt;
1553 /* Remember that the input function has begin stmt
1554 markers, so that we know to expect them when emitting
1555 debug info. */
1556 if (!cfun->debug_nonbind_markers
1557 && gimple_debug_nonbind_marker_p (stmt))
1558 cfun->debug_nonbind_markers = true;
1563 /* Set the gimple body to the statement sequence in the entry
1564 basic block. FIXME lto, this is fairly hacky. The existence
1565 of a gimple body is used by the cgraph routines, but we should
1566 really use the presence of the CFG. */
1568 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1569 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1572 update_max_bb_count ();
1573 fixup_call_stmt_edges (node, stmts);
1574 execute_all_ipa_stmt_fixups (node, stmts);
1576 free_dominance_info (CDI_DOMINATORS);
1577 free_dominance_info (CDI_POST_DOMINATORS);
1578 free (stmts);
1579 pop_cfun ();
1582 /* Read the initializer of variable VAR from DATA_IN using input block IB. */
1584 static void
1585 input_constructor (tree var, class data_in *data_in,
1586 class lto_input_block *ib)
1588 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1592 /* Read the body from DATA for function NODE and fill it in.
1593 FILE_DATA are the global decls and types. SECTION_TYPE is either
1594 LTO_section_function_body or LTO_section_static_initializer. If
1595 section type is LTO_section_function_body, FN must be the decl for
1596 that function. */
1598 static void
1599 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1600 const char *data, enum lto_section_type section_type)
1602 const struct lto_function_header *header;
1603 class data_in *data_in;
1604 int cfg_offset;
1605 int main_offset;
1606 int string_offset;
1607 tree fn_decl = node->decl;
1609 header = (const struct lto_function_header *) data;
1610 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1612 cfg_offset = sizeof (struct lto_function_header);
1613 main_offset = cfg_offset + header->cfg_size;
1614 string_offset = main_offset + header->main_size;
1616 else
1618 main_offset = sizeof (struct lto_function_header);
1619 string_offset = main_offset + header->main_size;
1622 data_in = lto_data_in_create (file_data, data + string_offset,
1623 header->string_size, vNULL);
1625 if (section_type == LTO_section_function_body)
1627 struct lto_in_decl_state *decl_state;
1628 unsigned from;
1630 gcc_checking_assert (node);
1632 /* Use the function's decl state. */
1633 decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1634 gcc_assert (decl_state);
1635 file_data->current_decl_state = decl_state;
1638 /* Set up the struct function. */
1639 from = data_in->reader_cache->nodes.length ();
1640 lto_input_block ib_main (data + main_offset, header->main_size,
1641 file_data);
1642 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1644 lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1645 file_data);
1646 input_function (fn_decl, data_in, &ib_main, &ib_cfg,
1647 dyn_cast <cgraph_node *>(node));
1649 else
1650 input_constructor (fn_decl, data_in, &ib_main);
1651 data_in->location_cache.apply_location_cache ();
1652 /* And fixup types we streamed locally. */
1654 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1655 unsigned len = cache->nodes.length ();
1656 unsigned i;
1657 for (i = len; i-- > from;)
1659 tree t = streamer_tree_cache_get_tree (cache, i);
1660 if (t == NULL_TREE)
1661 continue;
1663 if (TYPE_P (t))
1665 gcc_assert (TYPE_STRUCTURAL_EQUALITY_P (t));
1666 if (type_with_alias_set_p (t)
1667 && canonical_type_used_p (t))
1668 TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1669 if (TYPE_MAIN_VARIANT (t) != t)
1671 gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1672 TYPE_NEXT_VARIANT (t)
1673 = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1674 TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1680 /* Restore decl state */
1681 file_data->current_decl_state = file_data->global_decl_state;
1684 lto_data_in_delete (data_in);
1688 /* Read the body of NODE using DATA. FILE_DATA holds the global
1689 decls and types. */
1691 void
1692 lto_input_function_body (struct lto_file_decl_data *file_data,
1693 struct cgraph_node *node, const char *data)
1695 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1698 /* Read the body of NODE using DATA. FILE_DATA holds the global
1699 decls and types. */
1701 void
1702 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1703 struct varpool_node *node, const char *data)
1705 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1709 /* Queue of accumulated decl -> DIE mappings. Like locations, these are
1710 only applied to prevailing tree nodes during tree merging. */
1711 vec<dref_entry> dref_queue;
1713 /* Read the physical representation of a tree node EXPR from
1714 input block IB using the per-file context in DATA_IN. */
1716 static void
1717 lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
1719 /* Read all the bitfield values in EXPR. Note that for LTO, we
1720 only write language-independent bitfields, so no more unpacking is
1721 needed. */
1722 streamer_read_tree_bitfields (ib, data_in, expr);
1724 /* Read all the pointer fields in EXPR. */
1725 streamer_read_tree_body (ib, data_in, expr);
1727 /* Read any LTO-specific data not read by the tree streamer. Do not use
1728 stream_read_tree here since that flushes the dref_queue in the midst of
1729 SCC reading. */
1730 if (DECL_P (expr)
1731 && TREE_CODE (expr) != FUNCTION_DECL
1732 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1733 DECL_INITIAL (expr)
1734 = lto_input_tree_1 (ib, data_in, streamer_read_record_start (ib), 0);
1736 /* Stream references to early generated DIEs. Keep in sync with the
1737 trees handled in dwarf2out_register_external_die. */
1738 if ((DECL_P (expr)
1739 && TREE_CODE (expr) != FIELD_DECL
1740 && TREE_CODE (expr) != DEBUG_EXPR_DECL
1741 && TREE_CODE (expr) != TYPE_DECL)
1742 || TREE_CODE (expr) == BLOCK)
1744 const char *str = streamer_read_string (data_in, ib);
1745 if (str)
1747 unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
1748 dref_entry e = { expr, str, off };
1749 dref_queue.safe_push (e);
1751 /* There is no early DIE to refer to, but dwarf2out set things up
1752 in a way that expects the fixup. This tends to happen
1753 with -g1, see for example PR113488. */
1754 else if (DECL_P (expr) && DECL_ABSTRACT_ORIGIN (expr) == expr)
1755 DECL_ABSTRACT_ORIGIN (expr) = NULL_TREE;
1758 #ifdef ACCEL_COMPILER
1759 if ((VAR_P (expr)
1760 || TREE_CODE (expr) == PARM_DECL
1761 || TREE_CODE (expr) == FIELD_DECL)
1762 && DECL_MODE (expr) == VOIDmode)
1764 tree type = TREE_TYPE (expr);
1765 if (AGGREGATE_TYPE_P (type))
1766 SET_DECL_MODE (expr, TYPE_MODE (type));
1767 else if (VECTOR_TYPE_P (type))
1768 SET_DECL_MODE (expr, TYPE_MODE_RAW (type));
1771 if (VECTOR_TYPE_P (expr) && TYPE_MODE (expr) == VOIDmode)
1773 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (expr);
1774 tree innertype = TREE_TYPE (expr);
1775 machine_mode vmode
1776 = mode_for_vector (SCALAR_TYPE_MODE (innertype), nunits).else_blk ();
1777 SET_TYPE_MODE (expr, vmode);
1779 #endif
1782 /* Read the physical representation of a tree node with tag TAG from
1783 input block IB using the per-file context in DATA_IN. */
1785 static tree
1786 lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1787 enum LTO_tags tag, hashval_t hash)
1789 /* Instantiate a new tree node. */
1790 tree result = streamer_alloc_tree (ib, data_in, tag);
1792 /* Enter RESULT in the reader cache. This will make RESULT
1793 available so that circular references in the rest of the tree
1794 structure can be resolved in subsequent calls to stream_read_tree. */
1795 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1797 lto_read_tree_1 (ib, data_in, result);
1799 return result;
1803 /* Populate the reader cache with trees materialized from the SCC
1804 following in the IB, DATA_IN stream.
1805 If SHARED_SCC is true we input LTO_tree_scc. */
1807 hashval_t
1808 lto_input_scc (class lto_input_block *ib, class data_in *data_in,
1809 unsigned *len, unsigned *entry_len, bool shared_scc)
1811 unsigned size = streamer_read_uhwi (ib);
1812 hashval_t scc_hash = 0;
1813 unsigned scc_entry_len = 1;
1815 if (shared_scc)
1817 if (size & 1)
1818 scc_entry_len = streamer_read_uhwi (ib);
1819 size /= 2;
1820 scc_hash = streamer_read_uhwi (ib);
1823 if (size == 1)
1825 enum LTO_tags tag = streamer_read_record_start (ib);
1826 lto_input_tree_1 (ib, data_in, tag, scc_hash);
1828 else
1830 unsigned int first = data_in->reader_cache->nodes.length ();
1831 tree result;
1833 /* Materialize size trees by reading their headers. */
1834 for (unsigned i = 0; i < size; ++i)
1836 enum LTO_tags tag = streamer_read_record_start (ib);
1837 if (tag == LTO_null
1838 || tag == LTO_global_stream_ref
1839 || tag == LTO_tree_pickle_reference
1840 || tag == LTO_integer_cst
1841 || tag == LTO_tree_scc
1842 || tag == LTO_trees)
1843 gcc_unreachable ();
1845 result = streamer_alloc_tree (ib, data_in, tag);
1846 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1849 /* Read the tree bitpacks and references. */
1850 for (unsigned i = 0; i < size; ++i)
1852 result = streamer_tree_cache_get_tree (data_in->reader_cache,
1853 first + i);
1854 lto_read_tree_1 (ib, data_in, result);
1858 *len = size;
1859 *entry_len = scc_entry_len;
1860 return scc_hash;
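/* Editor's illustrative sketch (not part of the original file): the
   two-pass idea behind lto_input_scc above, on a self-contained type.
   Assumes <vector> and <utility>; ownership and error handling are
   omitted.  Every node of the cycle is allocated and cached first, then
   bodies are filled in, so references between members of the SCC
   (including forward and circular ones) resolve through the cache.  */
struct scc_sketch_node
{
  int payload;
  scc_sketch_node *ref;
};

static std::vector<scc_sketch_node *>
read_scc_sketch (const std::vector<std::pair<int, int>> &records)
{
  /* Pass 1: materialize empty nodes so every index can be resolved.  */
  std::vector<scc_sketch_node *> cache;
  for (size_t i = 0; i < records.size (); i++)
    cache.push_back (new scc_sketch_node ());
  /* Pass 2: fill in the bodies; REF indices may point anywhere inside
     the SCC, just like circular tree references.  */
  for (size_t i = 0; i < records.size (); i++)
    {
      cache[i]->payload = records[i].first;
      cache[i]->ref = cache[records[i].second];
    }
  return cache;
}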
1863 /* Read reference to tree from IB and DATA_IN.
1864 This is used for streaming tree bodies where we know that
1865 the tree is already in cache or is indexable and
1866 must be matched with stream_write_tree_ref. */
1868 tree
1869 stream_read_tree_ref (lto_input_block *ib, data_in *data_in)
1871 int ix = streamer_read_hwi (ib);
1872 if (!ix)
1873 return NULL_TREE;
1874 if (ix > 0)
1875 return streamer_tree_cache_get_tree (data_in->reader_cache, ix - 1);
1877 ix = -ix - 1;
1878 int id = ix & 1;
1879 ix /= 2;
1881 tree ret;
1882 if (!id)
1883 ret = (*data_in->file_data->current_decl_state
1884 ->streams[LTO_DECL_STREAM])[ix];
1885 else
1886 ret = (*SSANAMES (cfun))[ix];
1887 return ret;
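/* Editor's illustrative sketch (not part of the original file): the
   reference encoding implied by the decoder above.  Positive values are
   1-based reader-cache slots; negative values pack a stream id into the
   low bit and the in-stream index into the remaining bits.  The writer
   side (stream_write_tree_ref) is not shown in this file, so this is
   merely the inverse of the decoding logic above.  */
static int
encode_tree_ref_sketch (bool from_decl_stream, int index_in_stream)
{
  /* Inverse of: ix = -ix - 1; id = ix & 1; ix /= 2;  */
  int packed = index_in_stream * 2 + (from_decl_stream ? 0 : 1);
  return -packed - 1;
}

static int
decode_tree_ref_sketch (int ix, bool *from_decl_stream)
{
  ix = -ix - 1;
  *from_decl_stream = (ix & 1) == 0;
  return ix / 2;
}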
1890 /* Read a tree from input block IB using the per-file context in
1891 DATA_IN. This context is used, for example, to resolve references
1892 to previously read nodes. */
1894 tree
1895 lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
1896 enum LTO_tags tag, hashval_t hash)
1898 tree result;
1900 gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1902 if (tag == LTO_null)
1903 result = NULL_TREE;
1904 else if (tag == LTO_global_stream_ref || tag == LTO_ssa_name_ref)
1906 /* If TAG is a reference to an indexable tree, the next value
1907 in IB is the index into the table where we expect to find
1908 that tree. */
1909 result = lto_input_tree_ref (ib, data_in, cfun, tag);
1911 else if (tag == LTO_tree_pickle_reference)
1913 /* If TAG is a reference to a previously read tree, look it up in
1914 the reader cache. */
1915 result = streamer_get_pickled_tree (ib, data_in);
1917 else if (tag == LTO_integer_cst)
1919 /* For shared integer constants in singletons we can use the
1920 existing tree integer constant merging code. */
1921 tree type = stream_read_tree_ref (ib, data_in);
1922 unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1923 unsigned HOST_WIDE_INT i;
1924 HOST_WIDE_INT abuf[WIDE_INT_MAX_INL_ELTS], *a = abuf;
1926 if (UNLIKELY (len > WIDE_INT_MAX_INL_ELTS))
1927 a = XALLOCAVEC (HOST_WIDE_INT, len);
1928 for (i = 0; i < len; i++)
1929 a[i] = streamer_read_hwi (ib);
1930 gcc_assert (TYPE_PRECISION (type) <= WIDE_INT_MAX_PRECISION);
1931 result
1932 = wide_int_to_tree (type,
1933 wide_int::from_array (a, len,
1934 TYPE_PRECISION (type)));
1935 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1937 else if (tag == LTO_tree_scc || tag == LTO_trees)
1938 gcc_unreachable ();
1939 else
1941 /* Otherwise, materialize a new node from IB. */
1942 result = lto_read_tree (ib, data_in, tag, hash);
1945 return result;
1948 tree
1949 lto_input_tree (class lto_input_block *ib, class data_in *data_in)
1951 enum LTO_tags tag;
1953 /* Input pickled trees needed to stream in the reference. */
1954 while ((tag = streamer_read_record_start (ib)) == LTO_trees)
1956 unsigned len, entry_len;
1957 lto_input_scc (ib, data_in, &len, &entry_len, false);
1959 /* Register DECLs with the debuginfo machinery. */
1960 while (!dref_queue.is_empty ())
1962 dref_entry e = dref_queue.pop ();
1963 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1966 tree t = lto_input_tree_1 (ib, data_in, tag, 0);
1968 if (!dref_queue.is_empty ())
1970 dref_entry e = dref_queue.pop ();
1971 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1972 gcc_checking_assert (dref_queue.is_empty ());
1974 return t;
1978 /* Input toplevel asms. */
1980 void
1981 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1983 size_t len;
1984 const char *data
1985 = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
1986 const struct lto_simple_header_with_strings *header
1987 = (const struct lto_simple_header_with_strings *) data;
1988 int string_offset;
1989 class data_in *data_in;
1990 tree str;
1992 if (! data)
1993 return;
1995 string_offset = sizeof (*header) + header->main_size;
1997 lto_input_block ib (data + sizeof (*header), header->main_size,
1998 file_data);
2000 data_in = lto_data_in_create (file_data, data + string_offset,
2001 header->string_size, vNULL);
2003 while ((str = streamer_read_string_cst (data_in, &ib)))
2005 asm_node *node = symtab->finalize_toplevel_asm (str);
2006 node->order = streamer_read_hwi (&ib) + order_base;
2007 if (node->order >= symtab->order)
2008 symtab->order = node->order + 1;
2011 lto_data_in_delete (data_in);
2013 lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
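/* Illustrative sketch, not part of GCC: the section read above is laid out as
   [ header | main stream | string table ], so the string table begins at
   sizeof (header) + header->main_size.  The structs below are hypothetical
   and mirror only the two size fields this reader uses.  */

#include <cstddef>
#include <cstdint>

struct sketch_header
{
  uint32_t main_size;		/* Size of the main stream, in bytes.  */
  uint32_t string_size;		/* Size of the string table, in bytes.  */
};

struct sketch_section
{
  const char *main_data;
  size_t main_len;
  const char *string_data;
  size_t string_len;
};

static sketch_section
sketch_split_section (const char *data)
{
  const sketch_header *header = (const sketch_header *) data;
  sketch_section s;
  s.main_data = data + sizeof (*header);
  s.main_len = header->main_size;
  s.string_data = data + sizeof (*header) + header->main_size;
  s.string_len = header->string_size;
  return s;
}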
2017 /* Input mode table. */
2019 void
2020 lto_input_mode_table (struct lto_file_decl_data *file_data)
2022 size_t len;
2023 const char *data
2024 = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
2025 if (! data)
2026 internal_error ("cannot read LTO mode table from %s",
2027 file_data->file_name);
2029 const struct lto_simple_header_with_strings *header
2030 = (const struct lto_simple_header_with_strings *) data;
2031 int string_offset;
2032 class data_in *data_in;
2033 string_offset = sizeof (*header) + header->main_size;
2035 lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
2036 data_in = lto_data_in_create (file_data, data + string_offset,
2037 header->string_size, vNULL);
2038 bitpack_d bp = streamer_read_bitpack (&ib);
2040 #ifdef ACCEL_COMPILER
2041 host_num_poly_int_coeffs
2042 = bp_unpack_value (&bp, MAX_NUM_POLY_INT_COEFFS_BITS);
2043 #endif
2045 unsigned mode_bits = bp_unpack_value (&bp, 5);
2046 unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << mode_bits);
2048 file_data->mode_table = table;
2049 file_data->mode_bits = mode_bits;
2051 table[VOIDmode] = VOIDmode;
2052 table[BLKmode] = BLKmode;
2053 unsigned int m;
2054 while ((m = bp_unpack_value (&bp, mode_bits)) != VOIDmode)
2056 enum mode_class mclass
2057 = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
2058 poly_uint16 size = bp_unpack_poly_value (&bp, 16);
2059 poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
2060 machine_mode inner = (machine_mode) bp_unpack_value (&bp, mode_bits);
2061 poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
2062 unsigned int ibit = 0, fbit = 0;
2063 unsigned int real_fmt_len = 0;
2064 const char *real_fmt_name = NULL;
2065 switch (mclass)
2067 case MODE_FRACT:
2068 case MODE_UFRACT:
2069 case MODE_ACCUM:
2070 case MODE_UACCUM:
2071 ibit = bp_unpack_value (&bp, 8);
2072 fbit = bp_unpack_value (&bp, 8);
2073 break;
2074 case MODE_FLOAT:
2075 case MODE_DECIMAL_FLOAT:
2076 real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
2077 &real_fmt_len);
2078 break;
2079 default:
2080 break;
2082 /* First search just from GET_CLASS_NARROWEST_MODE towards wider modes;
2083 if not found, fall back to scanning all modes. */
2084 int pass;
2085 for (pass = 0; pass < 2; pass++)
2086 for (machine_mode mr = pass ? VOIDmode
2087 : GET_CLASS_NARROWEST_MODE (mclass);
2088 pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
2089 pass ? mr = (machine_mode) (mr + 1)
2090 : mr = GET_MODE_WIDER_MODE (mr).else_void ())
2091 if (GET_MODE_CLASS (mr) != mclass
2092 || maybe_ne (GET_MODE_SIZE (mr), size)
2093 || maybe_ne (GET_MODE_PRECISION (mr), prec)
2094 || (inner == m
2095 ? GET_MODE_INNER (mr) != mr
2096 : GET_MODE_INNER (mr) != table[(int) inner])
2097 || GET_MODE_IBIT (mr) != ibit
2098 || GET_MODE_FBIT (mr) != fbit
2099 || maybe_ne (GET_MODE_NUNITS (mr), nunits))
2100 continue;
2101 else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
2102 && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
2103 continue;
2104 else
2106 table[m] = mr;
2107 pass = 2;
2108 break;
2110 unsigned int mname_len;
2111 const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
2112 if (pass == 2)
2114 switch (mclass)
2116 case MODE_VECTOR_BOOL:
2117 case MODE_VECTOR_INT:
2118 case MODE_VECTOR_FLOAT:
2119 case MODE_VECTOR_FRACT:
2120 case MODE_VECTOR_UFRACT:
2121 case MODE_VECTOR_ACCUM:
2122 case MODE_VECTOR_UACCUM:
2123 /* Vector modes are recomputed on the accel side and shouldn't have
2124 been streamed out from the host. */
2125 gcc_unreachable ();
2126 /* FALLTHRU */
2127 default:
2128 /* This is only used for offloading-target compilations and
2129 is a user-facing error. Give a better error message for
2130 the common modes; see also mode-classes.def. */
2131 if (mclass == MODE_FLOAT)
2132 fatal_error (UNKNOWN_LOCATION,
2133 "%s - %u-bit-precision floating-point numbers "
2134 "unsupported (mode %qs)", TARGET_MACHINE,
2135 prec.to_constant (), mname);
2136 else if (mclass == MODE_DECIMAL_FLOAT)
2137 fatal_error (UNKNOWN_LOCATION,
2138 "%s - %u-bit-precision decimal floating-point "
2139 "numbers unsupported (mode %qs)", TARGET_MACHINE,
2140 prec.to_constant (), mname);
2141 else if (mclass == MODE_COMPLEX_FLOAT)
2142 fatal_error (UNKNOWN_LOCATION,
2143 "%s - %u-bit-precision complex floating-point "
2144 "numbers unsupported (mode %qs)", TARGET_MACHINE,
2145 prec.to_constant (), mname);
2146 else if (mclass == MODE_INT)
2147 fatal_error (UNKNOWN_LOCATION,
2148 "%s - %u-bit integer numbers unsupported (mode "
2149 "%qs)", TARGET_MACHINE, prec.to_constant (), mname);
2150 else
2151 fatal_error (UNKNOWN_LOCATION, "%s - unsupported mode %qs",
2152 TARGET_MACHINE, mname);
2153 break;
2157 lto_data_in_delete (data_in);
2159 lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
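/* Illustrative sketch, not part of GCC: the mode table built above is a
   remap from the host compiler's mode numbering to the offload target's.  A
   mode is identified by its observable properties (class, size, precision,
   ...) rather than by its number, and the reader searches the target's modes
   for one whose properties match.  The property set and names below are
   hypothetical and much smaller than the real one.  */

#include <vector>

struct sketch_mode_info
{
  int mclass;			/* Mode class, e.g. integer or float.  */
  unsigned size;		/* Size in bytes.  */
  unsigned precision;		/* Precision in bits.  */
};

/* Return the index of the target mode matching WANT, or -1 so the caller can
   report an unsupported mode.  */
static int
sketch_remap_mode (const std::vector<sketch_mode_info> &target_modes,
		   const sketch_mode_info &want)
{
  for (unsigned i = 0; i < target_modes.size (); i++)
    if (target_modes[i].mclass == want.mclass
	&& target_modes[i].size == want.size
	&& target_modes[i].precision == want.precision)
      return (int) i;
  return -1;
}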
2163 /* Initialization for the LTO reader. */
2165 void
2166 lto_reader_init (void)
2168 lto_streamer_init ();
2169 file_name_hash_table
2170 = new hash_table<string_slot_hasher> (37);
2171 string_slot_allocator = new object_allocator <struct string_slot>
2172 ("line map file name hash");
2173 gcc_obstack_init (&file_name_obstack);
2176 /* Free the hash tables and allocators used to stream in location file
names. */
2178 void
2179 lto_free_file_name_hash (void)
2181 delete file_name_hash_table;
2182 file_name_hash_table = NULL;
2183 delete string_slot_allocator;
2184 string_slot_allocator = NULL;
2185 delete path_name_pair_hash_table;
2186 path_name_pair_hash_table = NULL;
2187 delete string_pair_map_allocator;
2188 string_pair_map_allocator = NULL;
2189 /* file_name_obstack must stay allocated since it is referred to by
2190 the line map table. */
2194 /* Create a new data_in object for FILE_DATA. STRINGS is the string
2195 table to use and LEN is its size in bytes. RESOLUTIONS is the vector
2196 of linker resolutions (NULL if not using a linker plugin). */
2198 class data_in *
2199 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
2200 unsigned len,
2201 vec<ld_plugin_symbol_resolution_t> resolutions)
2203 class data_in *data_in = new (class data_in);
2204 data_in->file_data = file_data;
2205 data_in->strings = strings;
2206 data_in->strings_len = len;
2207 data_in->globals_resolution = resolutions;
2208 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
2209 return data_in;
2213 /* Remove DATA_IN. */
2215 void
2216 lto_data_in_delete (class data_in *data_in)
2218 data_in->globals_resolution.release ();
2219 streamer_tree_cache_delete (data_in->reader_cache);
2220 delete data_in;