gcc/final.cc
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2025 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "asan.h"
81 #include "rtl-iter.h"
82 #include "print-rtl.h"
83 #include "function-abi.h"
84 #include "common/common-target.h"
85 #include "diagnostic.h"
87 #include "dwarf2out.h"
89 /* Most ports don't need to define CC_STATUS_INIT.
90 So define a null default for it to save conditionalization later. */
91 #ifndef CC_STATUS_INIT
92 #define CC_STATUS_INIT
93 #endif
95 /* Is the given character a logical line separator for the assembler? */
96 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
97 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
98 #endif
100 #ifndef JUMP_TABLES_IN_TEXT_SECTION
101 #define JUMP_TABLES_IN_TEXT_SECTION 0
102 #endif
104 /* Bitflags used by final_scan_insn. */
105 #define SEEN_NOTE 1
106 #define SEEN_EMITTED 2
107 #define SEEN_NEXT_VIEW 4
109 /* Last insn processed by final_scan_insn. */
110 static rtx_insn *debug_insn;
111 rtx_insn *current_output_insn;
113 /* Line number of last NOTE. */
114 static int last_linenum;
116 /* Column number of last NOTE. */
117 static int last_columnnum;
119 /* Discriminator written to assembly. */
120 static int last_discriminator;
122 /* Compute discriminator to be written to assembly for current instruction.
123 Note: actual usage depends on loc_discriminator_kind setting. */
124 static inline int compute_discriminator (location_t loc);
126 /* Highest line number in current block. */
127 static int high_block_linenum;
129 /* Likewise for function. */
130 static int high_function_linenum;
132 /* Filename of last NOTE. */
133 static const char *last_filename;
135 /* Override filename, line and column number. */
136 static const char *override_filename;
137 static int override_linenum;
138 static int override_columnnum;
139 static int override_discriminator;
141 /* Whether to force emission of a line note before the next insn. */
142 static bool force_source_line = false;
144 extern const int length_unit_log; /* This is defined in insn-attrtab.cc. */
146 /* Nonzero while outputting an `asm' with operands.
147 This means that inconsistencies are the user's fault, so don't die.
148 The precise value is the insn being output, to pass to error_for_asm. */
149 const rtx_insn *this_is_asm_operands;
151 /* Number of operands of this insn, for an `asm' with operands. */
152 unsigned int insn_noperands;
154 /* Compare optimization flag. */
156 static rtx last_ignored_compare = 0;
158 /* Assign a unique number to each insn that is output.
159 This can be used to generate unique local labels. */
161 static int insn_counter = 0;
163 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
165 static int block_depth;
 167 /* True if we have enabled APP processing of our assembler output.  */
169 static bool app_on;
171 /* If we are outputting an insn sequence, this contains the sequence rtx.
172 Zero otherwise. */
174 rtx_sequence *final_sequence;
176 #ifdef ASSEMBLER_DIALECT
178 /* Number of the assembler dialect to use, starting at 0. */
179 static int dialect_number;
180 #endif
182 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
183 rtx current_insn_predicate;
185 /* True if printing into -fdump-final-insns= dump. */
186 bool final_insns_dump_p;
188 /* True if profile_function should be called, but hasn't been called yet. */
189 static bool need_profile_function;
191 static int asm_insn_count (rtx);
192 static void profile_function (FILE *);
193 static void profile_after_prologue (FILE *);
194 static bool notice_source_line (rtx_insn *, bool *);
195 static rtx walk_alter_subreg (rtx *, bool *);
196 static void output_asm_name (void);
197 static void output_alternate_entry_point (FILE *, rtx_insn *);
198 static tree get_mem_expr_from_op (rtx, int *);
199 static void output_asm_operand_names (rtx *, int *, int);
200 #ifdef LEAF_REGISTERS
201 static void leaf_renumber_regs (rtx_insn *);
202 #endif
203 static int align_fuzz (rtx, rtx, int, unsigned);
204 static void collect_fn_hard_reg_usage (void);
206 /* Initialize data in final at the beginning of a compilation. */
208 void
209 init_final (const char *filename ATTRIBUTE_UNUSED)
211 app_on = 0;
212 final_sequence = 0;
214 #ifdef ASSEMBLER_DIALECT
215 dialect_number = ASSEMBLER_DIALECT;
216 #endif
219 /* Default target function prologue and epilogue assembler output.
221 If not overridden for epilogue code, then the function body itself
222 contains return instructions wherever needed. */
223 void
224 default_function_pro_epilogue (FILE *)
228 void
229 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
230 tree decl ATTRIBUTE_UNUSED,
231 bool new_is_cold ATTRIBUTE_UNUSED)
235 /* Default target hook that outputs nothing to a stream. */
236 void
237 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
241 /* Enable APP processing of subsequent output.
242 Used before the output from an `asm' statement. */
244 void
245 app_enable (void)
247 if (! app_on)
249 fputs (ASM_APP_ON, asm_out_file);
250 app_on = 1;
254 /* Disable APP processing of subsequent output.
255 Called from varasm.cc before most kinds of output. */
257 void
258 app_disable (void)
260 if (app_on)
262 fputs (ASM_APP_OFF, asm_out_file);
263 app_on = 0;
267 /* Return the number of slots filled in the current
268 delayed branch sequence (we don't count the insn needing the
269 delay slot). Zero if not in a delayed branch sequence. */
272 dbr_sequence_length (void)
274 if (final_sequence != 0)
275 return XVECLEN (final_sequence, 0) - 1;
276 else
277 return 0;
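/* Illustrative sketch only: a hypothetical port output routine showing how
   dbr_sequence_length is typically consulted.  The function name and the
   assembly syntax are made up for the example; real ports do the
   equivalent in their own output templates.  */

static const char *
example_output_branch_with_delay (void)
{
  if (dbr_sequence_length () == 0)
    /* The delay slot was not filled; emit an explicit nop.  */
    return "b\t%l0\n\tnop";
  /* The delay insn is part of the SEQUENCE and is emitted by final.  */
  return "b\t%l0";
}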
280 /* The next two pages contain routines used to compute the length of an insn
281 and to shorten branches. */
283 /* Arrays for insn lengths, and addresses. The latter is referenced by
284 `insn_current_length'. */
286 static int *insn_lengths;
288 vec<int> insn_addresses_;
290 /* Max uid for which the above arrays are valid. */
291 static int insn_lengths_max_uid;
293 /* Address of insn being processed. Used by `insn_current_length'. */
294 int insn_current_address;
296 /* Address of insn being processed in previous iteration. */
297 int insn_last_address;
 299 /* Known invariant alignment of insn being processed.  */
300 int insn_current_align;
302 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
303 gives the next following alignment insn that increases the known
304 alignment, or NULL_RTX if there is no such insn.
305 For any alignment obtained this way, we can again index uid_align with
306 its uid to obtain the next following align that in turn increases the
307 alignment, till we reach NULL_RTX; the sequence obtained this way
308 for each insn we'll call the alignment chain of this insn in the following
309 comments. */
311 static rtx *uid_align;
312 static int *uid_shuid;
313 static vec<align_flags> label_align;
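/* Illustrative sketch only (hypothetical helper, not referenced elsewhere):
   walking the alignment chain described above.  align_fuzz below performs
   exactly this traversal while accumulating worst-case padding.  */

static int
example_alignment_chain_length (rtx_insn *insn)
{
  int n = 0;
  for (rtx x = uid_align[INSN_UID (insn)]; x != NULL_RTX;
       x = uid_align[INSN_UID (x)])
    /* X is the next alignment point that raises the known alignment.  */
    n++;
  return n;
}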
315 /* Indicate that branch shortening hasn't yet been done. */
317 void
318 init_insn_lengths (void)
320 if (uid_shuid)
322 free (uid_shuid);
323 uid_shuid = 0;
325 if (insn_lengths)
327 free (insn_lengths);
328 insn_lengths = 0;
329 insn_lengths_max_uid = 0;
331 if (HAVE_ATTR_length)
332 INSN_ADDRESSES_FREE ();
333 if (uid_align)
335 free (uid_align);
336 uid_align = 0;
340 /* Obtain the current length of an insn. If branch shortening has been done,
341 get its actual length. Otherwise, use FALLBACK_FN to calculate the
342 length. */
343 static int
344 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
346 rtx body;
347 int i;
348 int length = 0;
350 if (!HAVE_ATTR_length)
351 return 0;
353 if (insn_lengths_max_uid > INSN_UID (insn))
354 return insn_lengths[INSN_UID (insn)];
355 else
356 switch (GET_CODE (insn))
358 case NOTE:
359 case BARRIER:
360 case CODE_LABEL:
361 case DEBUG_INSN:
362 return 0;
364 case CALL_INSN:
365 case JUMP_INSN:
366 body = PATTERN (insn);
367 if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
368 length = asm_insn_count (body) * fallback_fn (insn);
369 else
370 length = fallback_fn (insn);
371 break;
373 case INSN:
374 body = PATTERN (insn);
375 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
376 return 0;
378 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
379 length = asm_insn_count (body) * fallback_fn (insn);
380 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
381 for (i = 0; i < seq->len (); i++)
382 length += get_attr_length_1 (seq->insn (i), fallback_fn);
383 else
384 length = fallback_fn (insn);
385 break;
387 default:
388 break;
391 #ifdef ADJUST_INSN_LENGTH
392 ADJUST_INSN_LENGTH (insn, length);
393 #endif
394 return length;
397 /* Obtain the current length of an insn. If branch shortening has been done,
398 get its actual length. Otherwise, get its maximum length. */
400 get_attr_length (rtx_insn *insn)
402 return get_attr_length_1 (insn, insn_default_length);
405 /* Obtain the current length of an insn. If branch shortening has been done,
406 get its actual length. Otherwise, get its minimum length. */
408 get_attr_min_length (rtx_insn *insn)
410 return get_attr_length_1 (insn, insn_min_length);
413 /* Code to handle alignment inside shorten_branches. */
 415 /* Here is an explanation of how the algorithm in align_fuzz can give
416 proper results:
418 Call a sequence of instructions beginning with alignment point X
419 and continuing until the next alignment point `block X'. When `X'
420 is used in an expression, it means the alignment value of the
421 alignment point.
423 Call the distance between the start of the first insn of block X, and
424 the end of the last insn of block X `IX', for the `inner size of X'.
425 This is clearly the sum of the instruction lengths.
427 Likewise with the next alignment-delimited block following X, which we
428 shall call block Y.
430 Call the distance between the start of the first insn of block X, and
431 the start of the first insn of block Y `OX', for the `outer size of X'.
433 The estimated padding is then OX - IX.
435 OX can be safely estimated as
437 if (X >= Y)
438 OX = round_up(IX, Y)
439 else
440 OX = round_up(IX, X) + Y - X
442 Clearly est(IX) >= real(IX), because that only depends on the
443 instruction lengths, and those being overestimated is a given.
445 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
446 we needn't worry about that when thinking about OX.
448 When X >= Y, the alignment provided by Y adds no uncertainty factor
449 for branch ranges starting before X, so we can just round what we have.
450 But when X < Y, we don't know anything about the, so to speak,
451 `middle bits', so we have to assume the worst when aligning up from an
452 address mod X to one mod Y, which is Y - X. */
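/* A worked instance of the estimate above, with made-up numbers: for
   IX = 10 bytes, X = 4 and Y = 8 we have X < Y, so
   OX = round_up (10, 4) + 8 - 4 = 12 + 4 = 16, i.e. up to 6 bytes of
   padding are assumed.  A hypothetical helper making the rule concrete
   (assuming the alignments are powers of two, as they always are here):  */

static inline int
example_estimate_outer_size (int ix, int x, int y)
{
  if (x >= y)
    return (ix + y - 1) & -y;			/* round_up (IX, Y) */
  return ((ix + x - 1) & -x) + y - x;		/* round_up (IX, X) + Y - X */
}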
454 #ifndef LABEL_ALIGN
455 #define LABEL_ALIGN(LABEL) align_labels
456 #endif
458 #ifndef LOOP_ALIGN
459 #define LOOP_ALIGN(LABEL) align_loops
460 #endif
462 #ifndef LABEL_ALIGN_AFTER_BARRIER
463 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
464 #endif
466 #ifndef JUMP_ALIGN
467 #define JUMP_ALIGN(LABEL) align_jumps
468 #endif
470 #ifndef ADDR_VEC_ALIGN
471 static int
472 final_addr_vec_align (rtx_jump_table_data *addr_vec)
474 int align = GET_MODE_SIZE (addr_vec->get_data_mode ());
476 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
477 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
478 return exact_log2 (align);
482 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
483 #endif
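/* For instance, a jump table whose entries are HImode (2 bytes wide) gets
   exact_log2 (2) == 1, i.e. a request for 2-byte alignment; the
   BIGGEST_ALIGNMENT cap only matters for unusually wide data modes.  */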
485 #ifndef INSN_LENGTH_ALIGNMENT
486 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
487 #endif
489 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
491 static int min_labelno, max_labelno;
493 #define LABEL_TO_ALIGNMENT(LABEL) \
494 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno])
496 /* For the benefit of port specific code do this also as a function. */
498 align_flags
499 label_to_alignment (rtx label)
501 if (CODE_LABEL_NUMBER (label) <= max_labelno)
502 return LABEL_TO_ALIGNMENT (label);
503 return align_flags ();
506 /* The differences in addresses
507 between a branch and its target might grow or shrink depending on
508 the alignment the start insn of the range (the branch for a forward
509 branch or the label for a backward branch) starts out on; if these
510 differences are used naively, they can even oscillate infinitely.
511 We therefore want to compute a 'worst case' address difference that
 512 is independent of the alignment the start insn of the range ends
513 up on, and that is at least as large as the actual difference.
514 The function align_fuzz calculates the amount we have to add to the
515 naively computed difference, by traversing the part of the alignment
516 chain of the start insn of the range that is in front of the end insn
517 of the range, and considering for each alignment the maximum amount
518 that it might contribute to a size increase.
520 For casesi tables, we also want to know worst case minimum amounts of
521 address difference, in case a machine description wants to introduce
522 some common offset that is added to all offsets in a table.
523 For this purpose, align_fuzz with a growth argument of 0 computes the
524 appropriate adjustment. */
526 /* Compute the maximum delta by which the difference of the addresses of
527 START and END might grow / shrink due to a different address for start
528 which changes the size of alignment insns between START and END.
529 KNOWN_ALIGN_LOG is the alignment known for START.
530 GROWTH should be ~0 if the objective is to compute potential code size
531 increase, and 0 if the objective is to compute potential shrink.
532 The return value is undefined for any other value of GROWTH. */
534 static int
535 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
537 int uid = INSN_UID (start);
538 rtx align_label;
539 int known_align = 1 << known_align_log;
540 int end_shuid = INSN_SHUID (end);
541 int fuzz = 0;
543 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
545 int align_addr, new_align;
547 uid = INSN_UID (align_label);
548 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
549 if (uid_shuid[uid] > end_shuid)
550 break;
551 align_flags alignment = LABEL_TO_ALIGNMENT (align_label);
552 new_align = 1 << alignment.levels[0].log;
553 if (new_align < known_align)
554 continue;
555 fuzz += (-align_addr ^ growth) & (new_align - known_align);
556 known_align = new_align;
558 return fuzz;
561 /* Compute a worst-case reference address of a branch so that it
562 can be safely used in the presence of aligned labels. Since the
563 size of the branch itself is unknown, the size of the branch is
564 not included in the range. I.e. for a forward branch, the reference
565 address is the end address of the branch as known from the previous
566 branch shortening pass, minus a value to account for possible size
567 increase due to alignment. For a backward branch, it is the start
568 address of the branch as known from the current pass, plus a value
569 to account for possible size increase due to alignment.
570 NB.: Therefore, the maximum offset allowed for backward branches needs
571 to exclude the branch size. */
574 insn_current_reference_address (rtx_insn *branch)
576 rtx dest;
577 int seq_uid;
579 if (! INSN_ADDRESSES_SET_P ())
580 return 0;
582 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
583 seq_uid = INSN_UID (seq);
584 if (!jump_to_label_p (branch))
585 /* This can happen for example on the PA; the objective is to know the
586 offset to address something in front of the start of the function.
587 Thus, we can treat it like a backward branch.
588 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
589 any alignment we'd encounter, so we skip the call to align_fuzz. */
590 return insn_current_address;
591 dest = JUMP_LABEL (branch);
593 /* BRANCH has no proper alignment chain set, so use SEQ.
594 BRANCH also has no INSN_SHUID. */
595 if (INSN_SHUID (seq) < INSN_SHUID (dest))
597 /* Forward branch. */
598 return (insn_last_address + insn_lengths[seq_uid]
599 - align_fuzz (seq, dest, length_unit_log, ~0));
601 else
603 /* Backward branch. */
604 return (insn_current_address
605 + align_fuzz (dest, seq, length_unit_log, ~0));
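/* A numeric illustration with made-up figures: for a forward branch, if the
   previous pass left insn_last_address at 100, the branch sequence SEQ was
   4 bytes long, and align_fuzz reports that up to 6 bytes of padding may
   appear before the target, the reference address used is 100 + 4 - 6 = 98.
   Understating the address keeps the computed branch distance pessimistic,
   so a short branch form is only chosen when it remains valid even if the
   padding materializes.  */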
609 /* Compute branch alignments based on CFG profile. */
611 void
612 compute_alignments (void)
614 basic_block bb;
615 align_flags max_alignment;
617 label_align.truncate (0);
619 max_labelno = max_label_num ();
620 min_labelno = get_first_label_num ();
621 label_align.safe_grow_cleared (max_labelno - min_labelno + 1, true);
623 /* If not optimizing or optimizing for size, don't assign any alignments. */
624 if (! optimize || optimize_function_for_size_p (cfun))
625 return;
627 if (dump_file)
629 dump_reg_info (dump_file);
630 dump_flow_info (dump_file, TDF_DETAILS);
631 flow_loops_dump (dump_file, NULL, 1);
633 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
634 profile_count count_threshold = cfun->cfg->count_max / param_align_threshold;
636 if (dump_file)
638 fprintf (dump_file, "count_max: ");
639 cfun->cfg->count_max.dump (dump_file);
640 fprintf (dump_file, "\n");
642 FOR_EACH_BB_FN (bb, cfun)
644 rtx_insn *label = BB_HEAD (bb);
645 bool has_fallthru = 0;
646 edge e;
647 edge_iterator ei;
649 if (!LABEL_P (label)
650 || optimize_bb_for_size_p (bb))
652 if (dump_file)
653 fprintf (dump_file,
654 "BB %4i loop %2i loop_depth %2i skipped.\n",
655 bb->index,
656 bb->loop_father->num,
657 bb_loop_depth (bb));
658 continue;
660 max_alignment = LABEL_ALIGN (label);
661 profile_count fallthru_count = profile_count::zero ();
662 profile_count branch_count = profile_count::zero ();
664 FOR_EACH_EDGE (e, ei, bb->preds)
666 if (e->flags & EDGE_FALLTHRU)
667 has_fallthru = 1, fallthru_count += e->count ();
668 else
669 branch_count += e->count ();
671 if (dump_file)
673 fprintf (dump_file, "BB %4i loop %2i loop_depth"
674 " %2i fall ",
675 bb->index, bb->loop_father->num,
676 bb_loop_depth (bb));
677 fallthru_count.dump (dump_file);
678 fprintf (dump_file, " branch ");
679 branch_count.dump (dump_file);
680 if (!bb->loop_father->inner && bb->loop_father->num)
681 fprintf (dump_file, " inner_loop");
682 if (bb->loop_father->header == bb)
683 fprintf (dump_file, " loop_header");
684 fprintf (dump_file, "\n");
686 if (!fallthru_count.initialized_p () || !branch_count.initialized_p ())
687 continue;
689 /* There are two purposes to align block with no fallthru incoming edge:
690 1) to avoid fetch stalls when branch destination is near cache boundary
691 2) to improve cache efficiency in case the previous block is not executed
692 (so it does not need to be in the cache).
 694 To catch the first case, we align frequently executed blocks.
695 To catch the second, we align blocks that are executed more frequently
696 than the predecessor and the predecessor is likely to not be executed
697 when function is called. */
699 if (!has_fallthru
700 && (branch_count > count_threshold
701 || (bb->count > bb->prev_bb->count * 10
702 && (bb->prev_bb->count
703 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count / 2))))
705 align_flags alignment = JUMP_ALIGN (label);
706 if (dump_file)
707 fprintf (dump_file, " jump alignment added.\n");
708 max_alignment = align_flags::max (max_alignment, alignment);
 710 /* In case the block is frequent and reached mostly by non-fallthru edges,
 711 align it.  It is most likely the first block of a loop.  */
712 if (has_fallthru
713 && !(single_succ_p (bb)
714 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
715 && optimize_bb_for_speed_p (bb)
716 && branch_count + fallthru_count > count_threshold
717 && (branch_count > fallthru_count * param_align_loop_iterations))
719 align_flags alignment = LOOP_ALIGN (label);
720 if (dump_file)
721 fprintf (dump_file, " internal loop alignment added.\n");
722 max_alignment = align_flags::max (max_alignment, alignment);
724 LABEL_TO_ALIGNMENT (label) = max_alignment;
727 loop_optimizer_finalize ();
728 free_dominance_info (CDI_DOMINATORS);
731 /* Grow the LABEL_ALIGN array after new labels are created. */
733 static void
734 grow_label_align (void)
736 int old = max_labelno;
737 int n_labels;
738 int n_old_labels;
740 max_labelno = max_label_num ();
742 n_labels = max_labelno - min_labelno + 1;
743 n_old_labels = old - min_labelno + 1;
745 label_align.safe_grow_cleared (n_labels, true);
747 /* Range of labels grows monotonically in the function. Failing here
748 means that the initialization of array got lost. */
749 gcc_assert (n_old_labels <= n_labels);
752 /* Update the already computed alignment information. LABEL_PAIRS is a vector
753 made up of pairs of labels for which the alignment information of the first
754 element will be copied from that of the second element. */
756 void
757 update_alignments (vec<rtx> &label_pairs)
759 unsigned int i = 0;
760 rtx iter, label = NULL_RTX;
762 if (max_labelno != max_label_num ())
763 grow_label_align ();
765 FOR_EACH_VEC_ELT (label_pairs, i, iter)
766 if (i & 1)
767 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
768 else
769 label = iter;
772 namespace {
774 const pass_data pass_data_compute_alignments =
776 RTL_PASS, /* type */
777 "alignments", /* name */
778 OPTGROUP_NONE, /* optinfo_flags */
779 TV_NONE, /* tv_id */
780 0, /* properties_required */
781 0, /* properties_provided */
782 0, /* properties_destroyed */
783 0, /* todo_flags_start */
784 0, /* todo_flags_finish */
787 class pass_compute_alignments : public rtl_opt_pass
789 public:
790 pass_compute_alignments (gcc::context *ctxt)
791 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
794 /* opt_pass methods: */
795 unsigned int execute (function *) final override
797 compute_alignments ();
798 return 0;
801 }; // class pass_compute_alignments
803 } // anon namespace
805 rtl_opt_pass *
806 make_pass_compute_alignments (gcc::context *ctxt)
808 return new pass_compute_alignments (ctxt);
812 /* Make a pass over all insns and compute their actual lengths by shortening
813 any branches of variable length if possible. */
815 /* shorten_branches might be called multiple times: for example, the SH
816 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
817 In order to do this, it needs proper length information, which it obtains
818 by calling shorten_branches. This cannot be collapsed with
819 shorten_branches itself into a single pass unless we also want to integrate
820 reorg.cc, since the branch splitting exposes new instructions with delay
821 slots. */
823 void
824 shorten_branches (rtx_insn *first)
826 rtx_insn *insn;
827 int max_uid;
828 int i;
829 rtx_insn *seq;
830 bool something_changed = true;
831 char *varying_length;
832 rtx body;
833 int uid;
834 rtx align_tab[MAX_CODE_ALIGN + 1];
836 /* Compute maximum UID and allocate label_align / uid_shuid. */
837 max_uid = get_max_uid ();
839 /* Free uid_shuid before reallocating it. */
840 free (uid_shuid);
842 uid_shuid = XNEWVEC (int, max_uid);
844 if (max_labelno != max_label_num ())
845 grow_label_align ();
847 /* Initialize label_align and set up uid_shuid to be strictly
848 monotonically rising with insn order. */
849 /* We use alignment here to keep track of the maximum alignment we want to
850 impose on the next CODE_LABEL (or the current one if we are processing
851 the CODE_LABEL itself). */
853 align_flags max_alignment;
855 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
857 INSN_SHUID (insn) = i++;
858 if (INSN_P (insn))
859 continue;
861 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
863 /* Merge in alignments computed by compute_alignments. */
864 align_flags alignment = LABEL_TO_ALIGNMENT (label);
865 max_alignment = align_flags::max (max_alignment, alignment);
867 rtx_jump_table_data *table = jump_table_for_label (label);
868 if (!table)
870 align_flags alignment = LABEL_ALIGN (label);
871 max_alignment = align_flags::max (max_alignment, alignment);
873 /* ADDR_VECs only take room if read-only data goes into the text
874 section. */
875 if ((JUMP_TABLES_IN_TEXT_SECTION
876 || readonly_data_section == text_section)
877 && table)
879 align_flags alignment = align_flags (ADDR_VEC_ALIGN (table));
880 max_alignment = align_flags::max (max_alignment, alignment);
882 LABEL_TO_ALIGNMENT (label) = max_alignment;
883 max_alignment = align_flags ();
885 else if (BARRIER_P (insn))
887 rtx_insn *label;
889 for (label = insn; label && ! INSN_P (label);
890 label = NEXT_INSN (label))
891 if (LABEL_P (label))
893 align_flags alignment
894 = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn));
895 max_alignment = align_flags::max (max_alignment, alignment);
896 break;
900 if (!HAVE_ATTR_length)
901 return;
903 /* Allocate the rest of the arrays. */
904 insn_lengths = XNEWVEC (int, max_uid);
905 insn_lengths_max_uid = max_uid;
906 /* Syntax errors can lead to labels being outside of the main insn stream.
907 Initialize insn_addresses, so that we get reproducible results. */
908 INSN_ADDRESSES_ALLOC (max_uid);
910 varying_length = XCNEWVEC (char, max_uid);
912 /* Initialize uid_align. We scan instructions
913 from end to start, and keep in align_tab[n] the last seen insn
914 that does an alignment of at least n+1, i.e. the successor
915 in the alignment chain for an insn that does / has a known
916 alignment of n. */
917 uid_align = XCNEWVEC (rtx, max_uid);
919 for (i = MAX_CODE_ALIGN + 1; --i >= 0;)
920 align_tab[i] = NULL_RTX;
921 seq = get_last_insn ();
922 for (; seq; seq = PREV_INSN (seq))
924 int uid = INSN_UID (seq);
925 int log;
926 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq).levels[0].log : 0);
927 uid_align[uid] = align_tab[0];
928 if (log)
930 /* Found an alignment label. */
931 gcc_checking_assert (log < MAX_CODE_ALIGN + 1);
932 uid_align[uid] = align_tab[log];
933 for (i = log - 1; i >= 0; i--)
934 align_tab[i] = seq;
938 /* When optimizing, we start assuming minimum length, and keep increasing
939 lengths as we find the need for this, till nothing changes.
940 When not optimizing, we start assuming maximum lengths, and
941 do a single pass to update the lengths. */
942 bool increasing = optimize != 0;
944 #ifdef CASE_VECTOR_SHORTEN_MODE
945 if (optimize)
947 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
948 label fields. */
950 int min_shuid = INSN_SHUID (get_insns ()) - 1;
951 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
952 int rel;
954 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
956 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
957 int len, i, min, max, insn_shuid;
958 int min_align;
959 addr_diff_vec_flags flags;
961 if (! JUMP_TABLE_DATA_P (insn)
962 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
963 continue;
964 pat = PATTERN (insn);
965 len = XVECLEN (pat, 1);
966 gcc_assert (len > 0);
967 min_align = MAX_CODE_ALIGN;
968 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
970 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
971 int shuid = INSN_SHUID (lab);
972 if (shuid < min)
974 min = shuid;
975 min_lab = lab;
977 if (shuid > max)
979 max = shuid;
980 max_lab = lab;
983 int label_alignment = LABEL_TO_ALIGNMENT (lab).levels[0].log;
984 if (min_align > label_alignment)
985 min_align = label_alignment;
987 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
988 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
989 insn_shuid = INSN_SHUID (insn);
990 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
991 memset (&flags, 0, sizeof (flags));
992 flags.min_align = min_align;
993 flags.base_after_vec = rel > insn_shuid;
994 flags.min_after_vec = min > insn_shuid;
995 flags.max_after_vec = max > insn_shuid;
996 flags.min_after_base = min > rel;
997 flags.max_after_base = max > rel;
998 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1000 if (increasing)
1001 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1004 #endif /* CASE_VECTOR_SHORTEN_MODE */
1006 /* Compute initial lengths, addresses, and varying flags for each insn. */
1007 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1009 for (insn_current_address = 0, insn = first;
1010 insn != 0;
1011 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1013 uid = INSN_UID (insn);
1015 insn_lengths[uid] = 0;
1017 if (LABEL_P (insn))
1019 int log = LABEL_TO_ALIGNMENT (insn).levels[0].log;
1020 if (log)
1022 int align = 1 << log;
1023 int new_address = (insn_current_address + align - 1) & -align;
1024 insn_lengths[uid] = new_address - insn_current_address;
1028 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1030 if (NOTE_P (insn) || BARRIER_P (insn)
1031 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1032 continue;
1033 if (insn->deleted ())
1034 continue;
1036 body = PATTERN (insn);
1037 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1039 /* This only takes room if read-only data goes into the text
1040 section. */
1041 if (JUMP_TABLES_IN_TEXT_SECTION
1042 || readonly_data_section == text_section)
1043 insn_lengths[uid] = (XVECLEN (body,
1044 GET_CODE (body) == ADDR_DIFF_VEC)
1045 * GET_MODE_SIZE (table->get_data_mode ()));
1046 /* Alignment is handled by ADDR_VEC_ALIGN. */
1048 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1049 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1050 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1052 int i;
1053 int const_delay_slots;
1054 if (DELAY_SLOTS)
1055 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1056 else
1057 const_delay_slots = 0;
1059 int (*inner_length_fun) (rtx_insn *)
1060 = const_delay_slots ? length_fun : insn_default_length;
1061 /* Inside a delay slot sequence, we do not do any branch shortening
1062 if the shortening could change the number of delay slots
1063 of the branch. */
1064 for (i = 0; i < body_seq->len (); i++)
1066 rtx_insn *inner_insn = body_seq->insn (i);
1067 int inner_uid = INSN_UID (inner_insn);
1068 int inner_length;
1070 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1071 || asm_noperands (PATTERN (inner_insn)) >= 0)
1072 inner_length = (asm_insn_count (PATTERN (inner_insn))
1073 * insn_default_length (inner_insn));
1074 else
1075 inner_length = inner_length_fun (inner_insn);
1077 insn_lengths[inner_uid] = inner_length;
1078 if (const_delay_slots)
1080 if ((varying_length[inner_uid]
1081 = insn_variable_length_p (inner_insn)) != 0)
1082 varying_length[uid] = 1;
1083 INSN_ADDRESSES (inner_uid) = (insn_current_address
1084 + insn_lengths[uid]);
1086 else
1087 varying_length[inner_uid] = 0;
1088 insn_lengths[uid] += inner_length;
1091 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1093 insn_lengths[uid] = length_fun (insn);
1094 varying_length[uid] = insn_variable_length_p (insn);
1097 /* If needed, do any adjustment. */
1098 #ifdef ADJUST_INSN_LENGTH
1099 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1100 if (insn_lengths[uid] < 0)
1101 fatal_insn ("negative insn length", insn);
1102 #endif
1105 /* Now loop over all the insns finding varying length insns. For each,
1106 get the current insn length. If it has changed, reflect the change.
1107 When nothing changes for a full pass, we are done. */
1109 while (something_changed)
1111 something_changed = false;
1112 insn_current_align = MAX_CODE_ALIGN - 1;
1113 for (insn_current_address = 0, insn = first;
1114 insn != 0;
1115 insn = NEXT_INSN (insn))
1117 int new_length;
1118 #ifdef ADJUST_INSN_LENGTH
1119 int tmp_length;
1120 #endif
1121 int length_align;
1123 uid = INSN_UID (insn);
1125 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
1127 int log = LABEL_TO_ALIGNMENT (label).levels[0].log;
1129 #ifdef CASE_VECTOR_SHORTEN_MODE
1130 /* If the mode of a following jump table was changed, we
1131 may need to update the alignment of this label. */
1133 if (JUMP_TABLES_IN_TEXT_SECTION
1134 || readonly_data_section == text_section)
1136 rtx_jump_table_data *table = jump_table_for_label (label);
1137 if (table)
1139 int newlog = ADDR_VEC_ALIGN (table);
1140 if (newlog != log)
1142 log = newlog;
1143 LABEL_TO_ALIGNMENT (insn) = log;
1144 something_changed = true;
1148 #endif
1150 if (log > insn_current_align)
1152 int align = 1 << log;
1153 int new_address= (insn_current_address + align - 1) & -align;
1154 insn_lengths[uid] = new_address - insn_current_address;
1155 insn_current_align = log;
1156 insn_current_address = new_address;
1158 else
1159 insn_lengths[uid] = 0;
1160 INSN_ADDRESSES (uid) = insn_current_address;
1161 continue;
1164 length_align = INSN_LENGTH_ALIGNMENT (insn);
1165 if (length_align < insn_current_align)
1166 insn_current_align = length_align;
1168 insn_last_address = INSN_ADDRESSES (uid);
1169 INSN_ADDRESSES (uid) = insn_current_address;
1171 #ifdef CASE_VECTOR_SHORTEN_MODE
1172 if (optimize
1173 && JUMP_TABLE_DATA_P (insn)
1174 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1176 rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn);
1177 rtx body = PATTERN (insn);
1178 int old_length = insn_lengths[uid];
1179 rtx_insn *rel_lab =
1180 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1181 rtx min_lab = XEXP (XEXP (body, 2), 0);
1182 rtx max_lab = XEXP (XEXP (body, 3), 0);
1183 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1184 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1185 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1186 rtx_insn *prev;
1187 int rel_align = 0;
1188 addr_diff_vec_flags flags;
1189 scalar_int_mode vec_mode;
1191 /* Avoid automatic aggregate initialization. */
1192 flags = ADDR_DIFF_VEC_FLAGS (body);
1194 /* Try to find a known alignment for rel_lab. */
1195 for (prev = rel_lab;
1196 prev
1197 && ! insn_lengths[INSN_UID (prev)]
1198 && ! (varying_length[INSN_UID (prev)] & 1);
1199 prev = PREV_INSN (prev))
1200 if (varying_length[INSN_UID (prev)] & 2)
1202 rel_align = LABEL_TO_ALIGNMENT (prev).levels[0].log;
1203 break;
1206 /* See the comment on addr_diff_vec_flags in rtl.h for the
1207 meaning of the flags values. base: REL_LAB vec: INSN */
 1208 /* Anything after INSN still has addresses from the last
1209 pass; adjust these so that they reflect our current
1210 estimate for this pass. */
1211 if (flags.base_after_vec)
1212 rel_addr += insn_current_address - insn_last_address;
1213 if (flags.min_after_vec)
1214 min_addr += insn_current_address - insn_last_address;
1215 if (flags.max_after_vec)
1216 max_addr += insn_current_address - insn_last_address;
1217 /* We want to know the worst case, i.e. lowest possible value
1218 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1219 its offset is positive, and we have to be wary of code shrink;
 1220 otherwise, it is negative, and we have to be wary of code
1221 size increase. */
1222 if (flags.min_after_base)
1224 /* If INSN is between REL_LAB and MIN_LAB, the size
1225 changes we are about to make can change the alignment
1226 within the observed offset, therefore we have to break
1227 it up into two parts that are independent. */
1228 if (! flags.base_after_vec && flags.min_after_vec)
1230 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1231 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1233 else
1234 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1236 else
1238 if (flags.base_after_vec && ! flags.min_after_vec)
1240 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1241 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1243 else
1244 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
 1246 /* Likewise, determine the worst case, i.e. highest possible value
1247 for the offset of MAX_LAB. */
1248 if (flags.max_after_base)
1250 if (! flags.base_after_vec && flags.max_after_vec)
1252 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1253 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1255 else
1256 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1258 else
1260 if (flags.base_after_vec && ! flags.max_after_vec)
1262 max_addr += align_fuzz (max_lab, insn, 0, 0);
1263 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1265 else
1266 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1268 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1269 max_addr - rel_addr, body);
1270 if (!increasing
1271 || (GET_MODE_SIZE (vec_mode)
1272 >= GET_MODE_SIZE (table->get_data_mode ())))
1273 PUT_MODE (body, vec_mode);
1274 if (JUMP_TABLES_IN_TEXT_SECTION
1275 || readonly_data_section == text_section)
1277 insn_lengths[uid]
1278 = (XVECLEN (body, 1)
1279 * GET_MODE_SIZE (table->get_data_mode ()));
1280 insn_current_address += insn_lengths[uid];
1281 if (insn_lengths[uid] != old_length)
1282 something_changed = true;
1285 continue;
1287 #endif /* CASE_VECTOR_SHORTEN_MODE */
1289 if (! (varying_length[uid]))
1291 if (NONJUMP_INSN_P (insn)
1292 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1294 int i;
1296 body = PATTERN (insn);
1297 for (i = 0; i < XVECLEN (body, 0); i++)
1299 rtx inner_insn = XVECEXP (body, 0, i);
1300 int inner_uid = INSN_UID (inner_insn);
1302 INSN_ADDRESSES (inner_uid) = insn_current_address;
1304 insn_current_address += insn_lengths[inner_uid];
1307 else
1308 insn_current_address += insn_lengths[uid];
1310 continue;
1313 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1315 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1316 int i;
1318 body = PATTERN (insn);
1319 new_length = 0;
1320 for (i = 0; i < seqn->len (); i++)
1322 rtx_insn *inner_insn = seqn->insn (i);
1323 int inner_uid = INSN_UID (inner_insn);
1324 int inner_length;
1326 INSN_ADDRESSES (inner_uid) = insn_current_address;
1328 /* insn_current_length returns 0 for insns with a
1329 non-varying length. */
1330 if (! varying_length[inner_uid])
1331 inner_length = insn_lengths[inner_uid];
1332 else
1333 inner_length = insn_current_length (inner_insn);
1335 if (inner_length != insn_lengths[inner_uid])
1337 if (!increasing || inner_length > insn_lengths[inner_uid])
1339 insn_lengths[inner_uid] = inner_length;
1340 something_changed = true;
1342 else
1343 inner_length = insn_lengths[inner_uid];
1345 insn_current_address += inner_length;
1346 new_length += inner_length;
1349 else
1351 new_length = insn_current_length (insn);
1352 insn_current_address += new_length;
1355 #ifdef ADJUST_INSN_LENGTH
1356 /* If needed, do any adjustment. */
1357 tmp_length = new_length;
1358 ADJUST_INSN_LENGTH (insn, new_length);
1359 insn_current_address += (new_length - tmp_length);
1360 #endif
1362 if (new_length != insn_lengths[uid]
1363 && (!increasing || new_length > insn_lengths[uid]))
1365 insn_lengths[uid] = new_length;
1366 something_changed = true;
1368 else
1369 insn_current_address += insn_lengths[uid] - new_length;
1371 /* For a non-optimizing compile, do only a single pass. */
1372 if (!increasing)
1373 break;
1375 crtl->max_insn_address = insn_current_address;
1376 free (varying_length);
1379 /* Given the body of an INSN known to be generated by an ASM statement, return
1380 the number of machine instructions likely to be generated for this insn.
1381 This is used to compute its length. */
1383 static int
1384 asm_insn_count (rtx body)
1386 const char *templ;
1388 if (GET_CODE (body) == ASM_INPUT)
1389 templ = XSTR (body, 0);
1390 else
1391 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1393 return asm_str_count (templ);
1396 /* Return the number of machine instructions likely to be generated for the
1397 inline-asm template. */
1399 asm_str_count (const char *templ)
1401 int count = 1;
1403 if (!*templ)
1404 return 0;
1406 for (; *templ; templ++)
1407 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1408 || *templ == '\n')
1409 count++;
1411 return count;
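/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR above,
   asm_str_count ("mov r0, r1; add r0, r0\n\tret") returns 3: the count
   starts at 1 and is incremented once for the ';' and once for the '\n',
   while an empty template returns 0.  */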
1414 /* Return true if DWARF2 debug info can be emitted for DECL. */
1416 static bool
1417 dwarf2_debug_info_emitted_p (tree decl)
1419 /* When DWARF2 debug info is not generated internally. */
1420 if (!dwarf_debuginfo_p () && !dwarf_based_debuginfo_p ())
1421 return false;
1423 if (DECL_IGNORED_P (decl))
1424 return false;
1426 return true;
1429 /* Return scope resulting from combination of S1 and S2. */
1430 static tree
1431 choose_inner_scope (tree s1, tree s2)
1433 if (!s1)
1434 return s2;
1435 if (!s2)
1436 return s1;
1437 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1438 return s1;
1439 return s2;
1442 /* Emit lexical block notes needed to change scope from S1 to S2. */
1444 static void
1445 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1447 rtx_insn *insn = orig_insn;
1448 tree com = NULL_TREE;
1449 tree ts1 = s1, ts2 = s2;
1450 tree s;
1452 while (ts1 != ts2)
1454 gcc_assert (ts1 && ts2);
1455 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1456 ts1 = BLOCK_SUPERCONTEXT (ts1);
1457 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1458 ts2 = BLOCK_SUPERCONTEXT (ts2);
1459 else
1461 ts1 = BLOCK_SUPERCONTEXT (ts1);
1462 ts2 = BLOCK_SUPERCONTEXT (ts2);
1465 com = ts1;
1467 /* Close scopes. */
1468 s = s1;
1469 while (s != com)
1471 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1472 NOTE_BLOCK (note) = s;
1473 s = BLOCK_SUPERCONTEXT (s);
1476 /* Open scopes. */
1477 s = s2;
1478 while (s != com)
1480 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1481 NOTE_BLOCK (insn) = s;
1482 s = BLOCK_SUPERCONTEXT (s);
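/* An illustration of the traversal above: given the block tree

     F (DECL_INITIAL)
      +-- A
      |    +-- B	<- S1
      +-- C		<- S2

   the common ancestor COM is F, so change_scope first emits
   NOTE_INSN_BLOCK_END notes for B and then A, and afterwards a
   NOTE_INSN_BLOCK_BEG note for C, all before ORIG_INSN.  */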
1486 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1487 on the scope tree and the newly reordered instructions. */
1489 static void
1490 reemit_insn_block_notes (void)
1492 tree cur_block = DECL_INITIAL (cfun->decl);
1493 rtx_insn *insn;
1495 insn = get_insns ();
1496 for (; insn; insn = NEXT_INSN (insn))
1498 tree this_block;
1500 /* Prevent lexical blocks from straddling section boundaries. */
1501 if (NOTE_P (insn))
1502 switch (NOTE_KIND (insn))
1504 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1506 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1507 s = BLOCK_SUPERCONTEXT (s))
1509 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1510 NOTE_BLOCK (note) = s;
1511 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1512 NOTE_BLOCK (note) = s;
1515 break;
1517 case NOTE_INSN_BEGIN_STMT:
1518 case NOTE_INSN_INLINE_ENTRY:
1519 this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn));
1520 if (!this_block)
1521 continue;
1522 goto set_cur_block_to_this_block;
1524 default:
1525 continue;
1528 if (!active_insn_p (insn))
1529 continue;
1531 /* Avoid putting scope notes between jump table and its label. */
1532 if (JUMP_TABLE_DATA_P (insn))
1533 continue;
1535 this_block = insn_scope (insn);
1536 /* For sequences compute scope resulting from merging all scopes
1537 of instructions nested inside. */
1538 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1540 int i;
1542 this_block = NULL;
1543 for (i = 0; i < body->len (); i++)
1544 this_block = choose_inner_scope (this_block,
1545 insn_scope (body->insn (i)));
1547 if (! this_block)
1549 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1550 continue;
1551 else
1552 this_block = DECL_INITIAL (cfun->decl);
1555 set_cur_block_to_this_block:
1556 if (this_block != cur_block)
1558 change_scope (insn, cur_block, this_block);
1559 cur_block = this_block;
1563 /* change_scope emits before the insn, not after. */
1564 rtx_note *note = emit_note (NOTE_INSN_DELETED);
1565 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1566 delete_insn (note);
1568 reorder_blocks ();
1571 static const char *some_local_dynamic_name;
1573 /* Locate some local-dynamic symbol still in use by this function
1574 so that we can print its name in local-dynamic base patterns.
1575 Return null if there are no local-dynamic references. */
1577 const char *
1578 get_some_local_dynamic_name ()
1580 subrtx_iterator::array_type array;
1581 rtx_insn *insn;
1583 if (some_local_dynamic_name)
1584 return some_local_dynamic_name;
1586 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1587 if (NONDEBUG_INSN_P (insn))
1588 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1590 const_rtx x = *iter;
1591 if (GET_CODE (x) == SYMBOL_REF)
1593 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1594 return some_local_dynamic_name = XSTR (x, 0);
1595 if (CONSTANT_POOL_ADDRESS_P (x))
1596 iter.substitute (get_pool_constant (x));
1600 return 0;
1603 /* Arrange for us to emit a source location note before any further
1604 real insns or section changes, by setting the SEEN_NEXT_VIEW bit in
1605 *SEEN, as long as we are keeping track of location views. The bit
1606 indicates we have referenced the next view at the current PC, so we
1607 have to emit it. This should be called next to the var_location
1608 debug hook. */
1610 static inline void
1611 set_next_view_needed (int *seen)
1613 if (debug_variable_location_views)
1614 *seen |= SEEN_NEXT_VIEW;
1617 /* Clear the flag in *SEEN indicating we need to emit the next view.
1618 This should be called next to the source_line debug hook. */
1620 static inline void
1621 clear_next_view_needed (int *seen)
1623 *seen &= ~SEEN_NEXT_VIEW;
1626 /* Test whether we have a pending request to emit the next view in
1627 *SEEN, and emit it if needed, clearing the request bit. */
1629 static inline void
1630 maybe_output_next_view (int *seen)
1632 if ((*seen & SEEN_NEXT_VIEW) != 0)
1634 clear_next_view_needed (seen);
1635 (*debug_hooks->source_line) (last_linenum, last_columnnum,
1636 last_filename, last_discriminator,
1637 false);
1641 /* We want to emit param bindings (before the first begin_stmt) in the
1642 initial view, if we are emitting views. To that end, we may
1643 consume initial notes in the function, processing them in
1644 final_start_function, before signaling the beginning of the
1645 prologue, rather than in final.
1647 We don't test whether the DECLs are PARM_DECLs: the assumption is
1648 that there will be a NOTE_INSN_BEGIN_STMT marker before any
1649 non-parameter NOTE_INSN_VAR_LOCATION. It's ok if the marker is not
1650 there, we'll just have more variable locations bound in the initial
1651 view, which is consistent with their being bound without any code
1652 that would give them a value. */
1654 static inline bool
1655 in_initial_view_p (rtx_insn *insn)
1657 return (!DECL_IGNORED_P (current_function_decl)
1658 && debug_variable_location_views
1659 && insn && GET_CODE (insn) == NOTE
1660 && (NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
1661 || NOTE_KIND (insn) == NOTE_INSN_DELETED));
1664 /* Output assembler code for the start of a function,
1665 and initialize some of the variables in this file
1666 for the new function. The label for the function and associated
1667 assembler pseudo-ops have already been output in `assemble_start_function'.
1669 FIRST is the first insn of the rtl for the function being compiled.
1670 FILE is the file to write assembler code to.
1671 SEEN should be initially set to zero, and it may be updated to
1672 indicate we have references to the next location view, that would
1673 require us to emit it at the current PC.
1674 OPTIMIZE_P is nonzero if we should eliminate redundant
1675 test and compare insns. */
1677 static void
1678 final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen,
1679 int optimize_p ATTRIBUTE_UNUSED)
1681 block_depth = 0;
1683 this_is_asm_operands = 0;
1685 need_profile_function = false;
1687 last_filename = LOCATION_FILE (prologue_location);
1688 last_linenum = LOCATION_LINE (prologue_location);
1689 last_columnnum = LOCATION_COLUMN (prologue_location);
1690 last_discriminator = 0;
1691 force_source_line = false;
1693 high_block_linenum = high_function_linenum = last_linenum;
1695 rtx_insn *first = *firstp;
1696 if (in_initial_view_p (first))
1700 final_scan_insn (first, file, 0, 0, seen);
1701 first = NEXT_INSN (first);
1703 while (in_initial_view_p (first));
1704 *firstp = first;
1707 if (!DECL_IGNORED_P (current_function_decl))
1708 debug_hooks->begin_prologue (last_linenum, last_columnnum,
1709 last_filename);
1711 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1712 dwarf2out_begin_prologue (0, 0, NULL);
1714 if (DECL_IGNORED_P (current_function_decl) && last_linenum && last_filename)
1715 debug_hooks->set_ignored_loc (last_linenum, last_columnnum, last_filename);
1717 #ifdef LEAF_REG_REMAP
1718 if (crtl->uses_only_leaf_regs)
1719 leaf_renumber_regs (first);
1720 #endif
1722 /* The Sun386i and perhaps other machines don't work right
1723 if the profiling code comes after the prologue. */
1724 if (targetm.profile_before_prologue () && crtl->profile)
1726 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1727 && targetm.have_prologue ())
1729 rtx_insn *insn;
1730 for (insn = first; insn; insn = NEXT_INSN (insn))
1731 if (!NOTE_P (insn))
1733 insn = NULL;
1734 break;
1736 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1737 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1738 break;
1739 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1740 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1741 continue;
1742 else
1744 insn = NULL;
1745 break;
1748 if (insn)
1749 need_profile_function = true;
1750 else
1751 profile_function (file);
1753 else
1754 profile_function (file);
1757 /* If debugging, assign block numbers to all of the blocks in this
1758 function. */
1759 if (write_symbols)
1761 reemit_insn_block_notes ();
1762 number_blocks (current_function_decl);
1763 /* We never actually put out begin/end notes for the top-level
1764 block in the function. But, conceptually, that block is
1765 always needed. */
1766 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1769 unsigned HOST_WIDE_INT min_frame_size
1770 = constant_lower_bound (get_frame_size ());
1771 if (min_frame_size > (unsigned HOST_WIDE_INT) warn_frame_larger_than_size)
1773 /* Issue a warning */
1774 warning (OPT_Wframe_larger_than_,
1775 "the frame size of %wu bytes is larger than %wu bytes",
1776 min_frame_size, warn_frame_larger_than_size);
1779 /* First output the function prologue: code to set up the stack frame. */
1780 targetm.asm_out.function_prologue (file);
1782 /* If the machine represents the prologue as RTL, the profiling code must
1783 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1784 if (! targetm.have_prologue ())
1785 profile_after_prologue (file);
1788 /* This is an exported final_start_function_1, callable without SEEN. */
1790 void
1791 final_start_function (rtx_insn *first, FILE *file,
1792 int optimize_p ATTRIBUTE_UNUSED)
1794 int seen = 0;
1795 final_start_function_1 (&first, file, &seen, optimize_p);
1796 gcc_assert (seen == 0);
1799 static void
1800 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1802 if (!targetm.profile_before_prologue () && crtl->profile)
1803 profile_function (file);
1806 static void
1807 profile_function (FILE *file ATTRIBUTE_UNUSED)
1809 #ifndef NO_PROFILE_COUNTERS
1810 # define NO_PROFILE_COUNTERS 0
1811 #endif
1812 #ifdef ASM_OUTPUT_REG_PUSH
1813 rtx sval = NULL, chain = NULL;
1815 if (cfun->returns_struct)
1816 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1817 true);
1818 if (cfun->static_chain_decl)
1819 chain = targetm.calls.static_chain (current_function_decl, true);
1820 #endif /* ASM_OUTPUT_REG_PUSH */
1822 if (! NO_PROFILE_COUNTERS)
1824 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1825 switch_to_section (data_section);
1826 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1827 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1828 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1831 switch_to_section (current_function_section ());
1833 #ifdef ASM_OUTPUT_REG_PUSH
1834 if (sval && REG_P (sval))
1835 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1836 if (chain && REG_P (chain))
1837 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1838 #endif
1840 FUNCTION_PROFILER (file, current_function_funcdef_no);
1842 #ifdef ASM_OUTPUT_REG_PUSH
1843 if (chain && REG_P (chain))
1844 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1845 if (sval && REG_P (sval))
1846 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1847 #endif
1850 /* Output assembler code for the end of a function.
1851 For clarity, args are same as those of `final_start_function'
1852 even though not all of them are needed. */
1854 void
1855 final_end_function (void)
1857 app_disable ();
1859 if (!DECL_IGNORED_P (current_function_decl))
1860 debug_hooks->end_function (high_function_linenum);
1862 /* Finally, output the function epilogue:
1863 code to restore the stack frame and return to the caller. */
1864 targetm.asm_out.function_epilogue (asm_out_file);
1866 /* And debug output. */
1867 if (!DECL_IGNORED_P (current_function_decl))
1868 debug_hooks->end_epilogue (last_linenum, last_filename);
1870 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1871 && dwarf2out_do_frame ())
1872 dwarf2out_end_epilogue (last_linenum, last_filename);
1874 some_local_dynamic_name = 0;
1878 /* Dumper helper for basic block information. FILE is the assembly
1879 output file, and INSN is the instruction being emitted. */
1881 static void
1882 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1883 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1885 basic_block bb;
1887 if (!flag_debug_asm)
1888 return;
1890 if (INSN_UID (insn) < bb_map_size
1891 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1893 edge e;
1894 edge_iterator ei;
1896 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1897 if (bb->count.initialized_p ())
1899 fprintf (file, ", count:");
1900 bb->count.dump (file);
1902 fprintf (file, " seq:%d", (*bb_seqn)++);
1903 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1904 FOR_EACH_EDGE (e, ei, bb->preds)
1906 dump_edge_info (file, e, TDF_DETAILS, 0);
1908 fprintf (file, "\n");
1910 if (INSN_UID (insn) < bb_map_size
1911 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1913 edge e;
1914 edge_iterator ei;
1916 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1917 FOR_EACH_EDGE (e, ei, bb->succs)
1919 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1921 fprintf (file, "\n");
1925 /* Output assembler code for some insns: all or part of a function.
1926 For description of args, see `final_start_function', above. */
1928 static void
1929 final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p)
1931 rtx_insn *insn, *next;
1933 /* Used for -dA dump. */
1934 basic_block *start_to_bb = NULL;
1935 basic_block *end_to_bb = NULL;
1936 int bb_map_size = 0;
1937 int bb_seqn = 0;
1939 last_ignored_compare = 0;
1941 init_recog ();
1943 CC_STATUS_INIT;
1945 if (flag_debug_asm)
1947 basic_block bb;
1949 bb_map_size = get_max_uid () + 1;
1950 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1951 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1953 /* There is no cfg for a thunk. */
1954 if (!cfun->is_thunk)
1955 FOR_EACH_BB_REVERSE_FN (bb, cfun)
1957 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1958 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1962 /* Output the insns. */
1963 for (insn = first; insn;)
1965 if (HAVE_ATTR_length)
1967 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1969 /* This can be triggered by bugs elsewhere in the compiler if
1970 new insns are created after init_insn_lengths is called. */
1971 gcc_assert (NOTE_P (insn));
1972 insn_current_address = -1;
1974 else
1975 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1976 /* final can be seen as an iteration of shorten_branches that
1977 does nothing (since a fixed point has already been reached). */
1978 insn_last_address = insn_current_address;
1981 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1982 bb_map_size, &bb_seqn);
1983 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1986 maybe_output_next_view (&seen);
1988 if (flag_debug_asm)
1990 free (start_to_bb);
1991 free (end_to_bb);
1994 /* Remove CFI notes, to avoid compare-debug failures. */
1995 for (insn = first; insn; insn = next)
1997 next = NEXT_INSN (insn);
1998 if (NOTE_P (insn)
1999 && (NOTE_KIND (insn) == NOTE_INSN_CFI
2000 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2001 delete_insn (insn);
2005 /* This is an exported final_1, callable without SEEN. */
2007 void
2008 final (rtx_insn *first, FILE *file, int optimize_p)
2010 /* Those that use the internal final_start_function_1/final_1 API
2011 skip initial debug bind notes in final_start_function_1, and pass
2012 the modified FIRST to final_1. But those that use the public
2013 final_start_function/final APIs, final_start_function can't move
2014 FIRST because it's not passed by reference, so if they were
2015 skipped there, skip them again here. */
2016 while (in_initial_view_p (first))
2017 first = NEXT_INSN (first);
2019 final_1 (first, file, 0, optimize_p);
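/* A minimal sketch of how the pass driver is expected to use these public
   entry points (cf. rest_of_handle_final; the variable names are
   illustrative):

     rtx_insn *first = get_insns ();
     final_start_function (first, asm_out_file, optimize);
     final (first, asm_out_file, optimize);
     final_end_function ();

   final_start_function emits the function header and prologue-side output,
   final walks the insn chain, and final_end_function emits the epilogue-side
   output and debug finalization.  */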
2022 const char *
2023 get_insn_template (int code, rtx_insn *insn)
2025 switch (insn_data[code].output_format)
2027 case INSN_OUTPUT_FORMAT_SINGLE:
2028 return insn_data[code].output.single;
2029 case INSN_OUTPUT_FORMAT_MULTI:
2030 return insn_data[code].output.multi[which_alternative];
2031 case INSN_OUTPUT_FORMAT_FUNCTION:
2032 gcc_assert (insn);
2033 return (*insn_data[code].output.function) (recog_data.operand, insn);
2035 default:
2036 gcc_unreachable ();
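/* The three output formats correspond to the ways an insn's output template
   can be written in the machine description; an illustrative sketch (the
   template contents and the helper-function name are made up):

     "add\t%1,%0"				-> INSN_OUTPUT_FORMAT_SINGLE

     "@
      add\t%1,%0
      lea\t%a2,%0"				-> INSN_OUTPUT_FORMAT_MULTI,
						   one string per alternative

     "* return output_move_helper (operands, insn);"
						-> INSN_OUTPUT_FORMAT_FUNCTION
*/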
2040 /* Emit the appropriate declaration for an alternate-entry-point
2041 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2042 LABEL_KIND != LABEL_NORMAL.
2044 The case fall-through in this function is intentional. */
2045 static void
2046 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2048 const char *name = LABEL_NAME (insn);
2050 switch (LABEL_KIND (insn))
2052 case LABEL_WEAK_ENTRY:
2053 #ifdef ASM_WEAKEN_LABEL
2054 ASM_WEAKEN_LABEL (file, name);
2055 gcc_fallthrough ();
2056 #endif
2057 case LABEL_GLOBAL_ENTRY:
2058 targetm.asm_out.globalize_label (file, name);
2059 gcc_fallthrough ();
2060 case LABEL_STATIC_ENTRY:
2061 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2062 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2063 #endif
2064 ASM_OUTPUT_LABEL (file, name);
2065 break;
2067 case LABEL_NORMAL:
2068 default:
2069 gcc_unreachable ();
2073 /* Given a CALL_INSN, find and return the nested CALL. */
2074 static rtx
2075 call_from_call_insn (rtx_call_insn *insn)
2077 rtx x;
2078 gcc_assert (CALL_P (insn));
2079 x = PATTERN (insn);
2081 while (GET_CODE (x) != CALL)
2083 switch (GET_CODE (x))
2085 default:
2086 gcc_unreachable ();
2087 case COND_EXEC:
2088 x = COND_EXEC_CODE (x);
2089 break;
2090 case PARALLEL:
2091 x = XVECEXP (x, 0, 0);
2092 break;
2093 case SET:
2094 x = XEXP (x, 1);
2095 break;
2098 return x;
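/* For example, the loop above digs the CALL out of patterns shaped like
   (illustrative RTL only):

     (call (mem:QI (symbol_ref "foo")) (const_int 0))
     (set (reg:SI 0) (call (mem:QI (symbol_ref "foo")) (const_int 0)))
     (parallel [(set (reg:SI 0) (call ...)) (clobber (reg:CC 17))])
     (cond_exec (ne (reg:CC 17) (const_int 0)) (call ...))
*/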
2101 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2102 corresponding source line, if available. */
2104 static void
2105 asm_show_source (const char *filename, int linenum)
2107 if (!filename)
2108 return;
2110 char_span line
2111 = global_dc->get_file_cache ().get_source_line (filename, linenum);
2112 if (!line)
2113 return;
2115 fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2116 /* "line" is not 0-terminated, so we must use its length. */
2117 fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
2118 fputc ('\n', asm_out_file);
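/* With -fverbose-asm this echoes the source into the assembly as comment
   lines of the form (a sketch; ASM_COMMENT_START varies by target):

	# foo.c:42:   x = y + 1;
*/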
2121 /* Return whether an absolute jump table is relocatable. */
2123 bool
2124 jumptable_relocatable (void)
2126 bool relocatable = false;
2128 if (!CASE_VECTOR_PC_RELATIVE
2129 && !targetm.asm_out.generate_pic_addr_diff_vec ()
2130 && targetm_common.have_named_sections)
2131 relocatable = targetm.asm_out.reloc_rw_mask ();
2133 return relocatable;
2136 /* The final scan for one insn, INSN.
2137 Args are the same as in `final', except that INSN
2138 is the insn being scanned.
2139 Value returned is the next insn to be scanned.
2141 NOPEEPHOLES is the flag to disallow peephole processing (currently
2142 used for within delayed branch sequence output).
2144 SEEN is used to track the end of the prologue, for emitting
2145 debug information. We force the emission of a line note after
2146 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2148 static rtx_insn *
2149 final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2150 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2152 rtx_insn *next;
2153 rtx_jump_table_data *table;
2155 insn_counter++;
2157 /* Ignore deleted insns. These can occur when we split insns (due to a
2158 template of "#") while not optimizing. */
2159 if (insn->deleted ())
2160 return NEXT_INSN (insn);
2162 switch (GET_CODE (insn))
2164 case NOTE:
2165 switch (NOTE_KIND (insn))
2167 case NOTE_INSN_DELETED:
2168 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2169 break;
2171 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2172 maybe_output_next_view (seen);
2174 output_function_exception_table (0);
2176 if (targetm.asm_out.unwind_emit)
2177 targetm.asm_out.unwind_emit (asm_out_file, insn);
2179 in_cold_section_p = !in_cold_section_p;
2181 gcc_checking_assert (in_cold_section_p);
2182 if (in_cold_section_p)
2183 cold_function_name
2184 = clone_function_name (current_function_decl, "cold");
2186 if (dwarf2out_do_frame ())
2188 dwarf2out_switch_text_section ();
2189 if (!dwarf2_debug_info_emitted_p (current_function_decl)
2190 && !DECL_IGNORED_P (current_function_decl))
2191 debug_hooks->switch_text_section ();
2193 else if (!DECL_IGNORED_P (current_function_decl))
2194 debug_hooks->switch_text_section ();
2195 if (DECL_IGNORED_P (current_function_decl) && last_linenum
2196 && last_filename)
2197 debug_hooks->set_ignored_loc (last_linenum, last_columnnum,
2198 last_filename);
2200 switch_to_section (current_function_section ());
2201 targetm.asm_out.function_switched_text_sections (asm_out_file,
2202 current_function_decl,
2203 in_cold_section_p);
2204 /* Emit a label for the split cold section. Form label name by
2205 suffixing "cold" to the original function's name. */
2206 if (in_cold_section_p)
2208 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2209 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2210 IDENTIFIER_POINTER
2211 (cold_function_name),
2212 current_function_decl);
2213 #else
2214 ASM_OUTPUT_LABEL (asm_out_file,
2215 IDENTIFIER_POINTER (cold_function_name));
2216 #endif
2217 if (dwarf2out_do_frame ()
2218 && cfun->fde->dw_fde_second_begin != NULL)
2219 ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin);
2221 break;
2223 case NOTE_INSN_BASIC_BLOCK:
2224 if (need_profile_function)
2226 profile_function (asm_out_file);
2227 need_profile_function = false;
2230 if (targetm.asm_out.unwind_emit)
2231 targetm.asm_out.unwind_emit (asm_out_file, insn);
2233 break;
2235 case NOTE_INSN_EH_REGION_BEG:
2236 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2237 NOTE_EH_HANDLER (insn));
2238 break;
2240 case NOTE_INSN_EH_REGION_END:
2241 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2242 NOTE_EH_HANDLER (insn));
2243 break;
2245 case NOTE_INSN_PROLOGUE_END:
2246 targetm.asm_out.function_end_prologue (file);
2247 profile_after_prologue (file);
2249 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2251 *seen |= SEEN_EMITTED;
2252 force_source_line = true;
2254 else
2255 *seen |= SEEN_NOTE;
2257 break;
2259 case NOTE_INSN_EPILOGUE_BEG:
2260 if (!DECL_IGNORED_P (current_function_decl))
2261 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2262 targetm.asm_out.function_begin_epilogue (file);
2263 break;
2265 case NOTE_INSN_CFI:
2266 dwarf2out_emit_cfi (NOTE_CFI (insn));
2267 break;
2269 case NOTE_INSN_CFI_LABEL:
2270 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2271 NOTE_LABEL_NUMBER (insn));
2272 break;
2274 case NOTE_INSN_FUNCTION_BEG:
2275 if (need_profile_function)
2277 profile_function (asm_out_file);
2278 need_profile_function = false;
2281 app_disable ();
2282 if (!DECL_IGNORED_P (current_function_decl))
2283 debug_hooks->end_prologue (last_linenum, last_filename);
2285 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2287 *seen |= SEEN_EMITTED;
2288 force_source_line = true;
2290 else
2291 *seen |= SEEN_NOTE;
2293 break;
2295 case NOTE_INSN_BLOCK_BEG:
2296 if (debug_info_level >= DINFO_LEVEL_NORMAL
2297 || dwarf_debuginfo_p ()
2298 || write_symbols == VMS_DEBUG)
2300 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2302 app_disable ();
2303 ++block_depth;
2304 high_block_linenum = last_linenum;
2306 /* Output debugging info about the symbol-block beginning. */
2307 if (!DECL_IGNORED_P (current_function_decl))
2308 debug_hooks->begin_block (last_linenum, n, NOTE_BLOCK (insn));
2310 /* Mark this block as output. */
2311 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2312 BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
2314 break;
2316 case NOTE_INSN_BLOCK_END:
2317 maybe_output_next_view (seen);
2319 if (debug_info_level >= DINFO_LEVEL_NORMAL
2320 || dwarf_debuginfo_p ()
2321 || write_symbols == VMS_DEBUG)
2323 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2325 app_disable ();
2327 /* End of a symbol-block. */
2328 --block_depth;
2329 gcc_assert (block_depth >= 0);
2331 if (!DECL_IGNORED_P (current_function_decl))
2332 debug_hooks->end_block (high_block_linenum, n);
2333 gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
2334 == in_cold_section_p);
2336 break;
2338 case NOTE_INSN_DELETED_LABEL:
2339 /* Emit the label. We may have deleted the CODE_LABEL because
2340 the label could be proved to be unreachable, though still
2341 referenced (in the form of having its address taken). */
2342 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2343 break;
2345 case NOTE_INSN_DELETED_DEBUG_LABEL:
2346 /* Similarly, but need to use different namespace for it. */
2347 if (CODE_LABEL_NUMBER (insn) != -1)
2348 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2349 break;
2351 case NOTE_INSN_VAR_LOCATION:
2352 if (!DECL_IGNORED_P (current_function_decl))
2354 debug_hooks->var_location (insn);
2355 set_next_view_needed (seen);
2357 break;
2359 case NOTE_INSN_BEGIN_STMT:
2360 gcc_checking_assert (cfun->debug_nonbind_markers);
2361 if (!DECL_IGNORED_P (current_function_decl)
2362 && notice_source_line (insn, NULL))
2364 output_source_line:
2365 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2366 last_filename, last_discriminator,
2367 true);
2368 clear_next_view_needed (seen);
2370 break;
2372 case NOTE_INSN_INLINE_ENTRY:
2373 gcc_checking_assert (cfun->debug_nonbind_markers);
2374 if (!DECL_IGNORED_P (current_function_decl)
2375 && notice_source_line (insn, NULL))
2377 (*debug_hooks->inline_entry) (LOCATION_BLOCK
2378 (NOTE_MARKER_LOCATION (insn)));
2379 goto output_source_line;
2381 break;
2383 default:
2384 gcc_unreachable ();
2385 break;
2387 break;
2389 case BARRIER:
2390 break;
2392 case CODE_LABEL:
2393 /* The target port might emit labels in the output function for
2394 some insn, e.g. sh.cc output_branchy_insn. */
2395 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2397 align_flags alignment = LABEL_TO_ALIGNMENT (insn);
2398 if (alignment.levels[0].log && NEXT_INSN (insn))
2400 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2401 /* Output both primary and secondary alignment. */
2402 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log,
2403 alignment.levels[0].maxskip);
2404 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log,
2405 alignment.levels[1].maxskip);
2406 #else
2407 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2408 ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log);
2409 #else
2410 ASM_OUTPUT_ALIGN (file, alignment.levels[0].log);
2411 #endif
2412 #endif
2415 CC_STATUS_INIT;
2417 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2418 debug_hooks->label (as_a <rtx_code_label *> (insn));
2420 app_disable ();
2422 /* If this label is followed by a jump-table, make sure we put
2423 the label in the read-only section. Also possibly write the
2424 label and jump table together. */
2425 table = jump_table_for_label (as_a <rtx_code_label *> (insn));
2426 if (table)
2428 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2429 /* In this case, the case vector is being moved by the
2430 target, so don't output the label at all. Leave that
2431 to the back end macros. */
2432 #else
2433 if (! JUMP_TABLES_IN_TEXT_SECTION)
2435 int log_align;
2437 switch_to_section (targetm.asm_out.function_rodata_section
2438 (current_function_decl,
2439 jumptable_relocatable ()));
2441 #ifdef ADDR_VEC_ALIGN
2442 log_align = ADDR_VEC_ALIGN (table);
2443 #else
2444 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2445 #endif
2446 ASM_OUTPUT_ALIGN (file, log_align);
2448 else
2449 switch_to_section (current_function_section ());
2451 #ifdef ASM_OUTPUT_CASE_LABEL
2452 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table);
2453 #else
2454 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2455 #endif
2456 #endif
2457 break;
2459 if (LABEL_ALT_ENTRY_P (insn))
2460 output_alternate_entry_point (file, insn);
2461 else
2462 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2463 break;
2465 default:
2467 rtx body = PATTERN (insn);
2468 int insn_code_number;
2469 const char *templ;
2470 bool is_stmt, *is_stmt_p;
2472 if (MAY_HAVE_DEBUG_MARKER_INSNS && cfun->debug_nonbind_markers)
2474 is_stmt = false;
2475 is_stmt_p = NULL;
2477 else
2478 is_stmt_p = &is_stmt;
2480 /* Reset this early so it is correct for ASM statements. */
2481 current_insn_predicate = NULL_RTX;
2483 /* An INSN, JUMP_INSN or CALL_INSN.
2484 First check for special kinds that recog doesn't recognize. */
2486 if (GET_CODE (body) == USE /* These are just declarations. */
2487 || GET_CODE (body) == CLOBBER)
2488 break;
2490 /* Detect insns that are really jump-tables
2491 and output them as such. */
2493 if (JUMP_TABLE_DATA_P (insn))
2495 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2496 int vlen, idx;
2497 #endif
2499 if (! JUMP_TABLES_IN_TEXT_SECTION)
2500 switch_to_section (targetm.asm_out.function_rodata_section
2501 (current_function_decl,
2502 jumptable_relocatable ()));
2503 else
2504 switch_to_section (current_function_section ());
2506 app_disable ();
2508 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2509 if (GET_CODE (body) == ADDR_VEC)
2511 #ifdef ASM_OUTPUT_ADDR_VEC
2512 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2513 #else
2514 gcc_unreachable ();
2515 #endif
2517 else
2519 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2520 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2521 #else
2522 gcc_unreachable ();
2523 #endif
2525 #else
2526 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2527 for (idx = 0; idx < vlen; idx++)
2529 if (GET_CODE (body) == ADDR_VEC)
2531 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2532 ASM_OUTPUT_ADDR_VEC_ELT
2533 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2534 #else
2535 gcc_unreachable ();
2536 #endif
2538 else
2540 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2541 ASM_OUTPUT_ADDR_DIFF_ELT
2542 (file,
2543 body,
2544 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2545 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2546 #else
2547 gcc_unreachable ();
2548 #endif
2551 #ifdef ASM_OUTPUT_CASE_END
2552 ASM_OUTPUT_CASE_END (file,
2553 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2554 insn);
2555 #endif
2556 #endif
2558 switch_to_section (current_function_section ());
2560 if (debug_variable_location_views
2561 && !DECL_IGNORED_P (current_function_decl))
2562 debug_hooks->var_location (insn);
2564 break;
2566 /* Output this line note if it is the first or the last line
2567 note in a row. */
2568 if (!DECL_IGNORED_P (current_function_decl)
2569 && notice_source_line (insn, is_stmt_p))
2571 if (flag_verbose_asm)
2572 asm_show_source (last_filename, last_linenum);
2573 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2574 last_filename, last_discriminator,
2575 is_stmt);
2576 clear_next_view_needed (seen);
2578 else
2579 maybe_output_next_view (seen);
2581 gcc_checking_assert (!DEBUG_INSN_P (insn));
2583 if (GET_CODE (body) == PARALLEL
2584 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2585 body = XVECEXP (body, 0, 0);
2587 if (GET_CODE (body) == ASM_INPUT)
2589 const char *string = XSTR (body, 0);
2591 /* There's no telling what that did to the condition codes. */
2592 CC_STATUS_INIT;
2594 if (string[0])
2596 expanded_location loc;
2598 app_enable ();
2599 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2600 if (*loc.file && loc.line)
2601 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2602 ASM_COMMENT_START, loc.line, loc.file);
2603 fprintf (asm_out_file, "\t%s\n", string);
2604 #if HAVE_AS_LINE_ZERO
2605 if (*loc.file && loc.line)
2606 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2607 #endif
2609 break;
2612 /* Detect `asm' construct with operands. */
2613 if (asm_noperands (body) >= 0)
2615 unsigned int noperands = asm_noperands (body);
2616 rtx *ops = XALLOCAVEC (rtx, noperands);
2617 const char *string;
2618 location_t loc;
2619 expanded_location expanded;
2621 /* There's no telling what that did to the condition codes. */
2622 CC_STATUS_INIT;
2624 /* Get out the operand values. */
2625 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2626 /* Inhibit dying on what would otherwise be compiler bugs. */
2627 insn_noperands = noperands;
2628 this_is_asm_operands = insn;
2629 expanded = expand_location (loc);
2631 #ifdef FINAL_PRESCAN_INSN
2632 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2633 #endif
2635 /* Output the insn using them. */
2636 if (string[0])
2638 app_enable ();
2639 if (expanded.file && expanded.line)
2640 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2641 ASM_COMMENT_START, expanded.line, expanded.file);
2642 output_asm_insn (string, ops);
2643 #if HAVE_AS_LINE_ZERO
2644 if (expanded.file && expanded.line)
2645 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2646 #endif
2649 if (targetm.asm_out.final_postscan_insn)
2650 targetm.asm_out.final_postscan_insn (file, insn, ops,
2651 insn_noperands);
2653 this_is_asm_operands = 0;
2654 break;
2657 app_disable ();
2659 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2661 /* A delayed-branch sequence */
2662 int i;
2664 final_sequence = seq;
2666 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2667 force the restoration of a comparison that was previously
2668 thought unnecessary. If that happens, cancel this sequence
2669 and cause that insn to be restored. */
2671 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2672 if (next != seq->insn (1))
2674 final_sequence = 0;
2675 return next;
2678 for (i = 1; i < seq->len (); i++)
2680 rtx_insn *insn = seq->insn (i);
2681 rtx_insn *next = NEXT_INSN (insn);
2682 /* We loop in case any instruction in a delay slot gets
2683 split. */
2685 insn = final_scan_insn (insn, file, 0, 1, seen);
2686 while (insn != next);
2688 #ifdef DBR_OUTPUT_SEQEND
2689 DBR_OUTPUT_SEQEND (file);
2690 #endif
2691 final_sequence = 0;
2693 /* If the insn requiring the delay slot was a CALL_INSN, the
2694 insns in the delay slot are actually executed before the
2695 called function. Hence we don't preserve any CC-setting
2696 actions in these insns and the CC must be marked as being
2697 clobbered by the function. */
2698 if (CALL_P (seq->insn (0)))
2700 CC_STATUS_INIT;
2702 break;
2705 /* We have a real machine instruction as rtl. */
2707 body = PATTERN (insn);
2709 /* Do machine-specific peephole optimizations if desired. */
2711 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2713 rtx_insn *next = peephole (insn);
2714 /* When peepholing, if there were notes within the peephole,
2715 emit them before the peephole. */
2716 if (next != 0 && next != NEXT_INSN (insn))
2718 rtx_insn *note, *prev = PREV_INSN (insn);
2720 for (note = NEXT_INSN (insn); note != next;
2721 note = NEXT_INSN (note))
2722 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2724 /* Put the notes in the proper position for a later
2725 rescan. For example, the SH target can do this
2726 when generating a far jump in a delayed branch
2727 sequence. */
2728 note = NEXT_INSN (insn);
2729 SET_PREV_INSN (note) = prev;
2730 SET_NEXT_INSN (prev) = note;
2731 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2732 SET_PREV_INSN (insn) = PREV_INSN (next);
2733 SET_NEXT_INSN (insn) = next;
2734 SET_PREV_INSN (next) = insn;
2737 /* PEEPHOLE might have changed this. */
2738 body = PATTERN (insn);
2741 /* Try to recognize the instruction.
2742 If successful, verify that the operands satisfy the
2743 constraints for the instruction. Crash if they don't,
2744 since `reload' should have changed them so that they do. */
2746 insn_code_number = recog_memoized (insn);
2747 cleanup_subreg_operands (insn);
2749 /* Dump the insn in the assembly for debugging (-dAP).
2750 If the final dump is requested as slim RTL, dump slim
2751 RTL to the assembly file also. */
2752 if (flag_dump_rtl_in_asm)
2754 print_rtx_head = ASM_COMMENT_START;
2755 if (! (dump_flags & TDF_SLIM))
2756 print_rtl_single (asm_out_file, insn);
2757 else
2758 dump_insn_slim (asm_out_file, insn);
2759 print_rtx_head = "";
2762 if (! constrain_operands_cached (insn, 1))
2763 fatal_insn_not_found (insn);
2765 /* Some target machines need to prescan each insn before
2766 it is output. */
2768 #ifdef FINAL_PRESCAN_INSN
2769 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2770 #endif
2772 if (targetm.have_conditional_execution ()
2773 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2774 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2776 current_output_insn = debug_insn = insn;
2778 /* Find the proper template for this insn. */
2779 templ = get_insn_template (insn_code_number, insn);
2781 /* If the C code returns 0, it means that it is a jump insn
2782 which follows a deleted test insn, and that test insn
2783 needs to be reinserted. */
2784 if (templ == 0)
2786 rtx_insn *prev;
2788 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2790 /* We have already processed the notes between the setter and
2791 the user. Make sure we don't process them again, this is
2792 particularly important if one of the notes is a block
2793 scope note or an EH note. */
2794 for (prev = insn;
2795 prev != last_ignored_compare;
2796 prev = PREV_INSN (prev))
2798 if (NOTE_P (prev))
2799 delete_insn (prev); /* Use delete_note. */
2802 return prev;
2805 /* If the template is the string "#", it means that this insn must
2806 be split. */
2807 if (templ[0] == '#' && templ[1] == '\0')
2809 rtx_insn *new_rtx = try_split (body, insn, 0);
2811 /* If we didn't split the insn, go away. */
2812 if (new_rtx == insn && PATTERN (new_rtx) == body)
2813 fatal_insn ("could not split insn", insn);
2815 /* If we have a length attribute, this instruction should have
2816 been split in shorten_branches, to ensure that we would have
2817 valid length info for the splitees. */
2818 gcc_assert (!HAVE_ATTR_length);
2820 return new_rtx;
2823 /* ??? This will put the directives in the wrong place if
2824 get_insn_template outputs assembly directly. However, calling it
2825 before get_insn_template breaks if the insn is split. */
2826 if (targetm.asm_out.unwind_emit_before_insn
2827 && targetm.asm_out.unwind_emit)
2828 targetm.asm_out.unwind_emit (asm_out_file, insn);
2830 rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
2831 if (call_insn != NULL)
2833 rtx x = call_from_call_insn (call_insn);
2834 x = XEXP (x, 0);
2835 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2837 tree t;
2838 x = XEXP (x, 0);
2839 t = SYMBOL_REF_DECL (x);
2840 if (t)
2841 assemble_external (t);
2845 /* Output assembler code from the template. */
2846 output_asm_insn (templ, recog_data.operand);
2848 /* Some target machines need to postscan each insn after
2849 it is output. */
2850 if (targetm.asm_out.final_postscan_insn)
2851 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2852 recog_data.n_operands);
2854 if (!targetm.asm_out.unwind_emit_before_insn
2855 && targetm.asm_out.unwind_emit)
2856 targetm.asm_out.unwind_emit (asm_out_file, insn);
2858 /* Let the debug info back-end know about this call. We do this only
2859 after the instruction has been emitted because labels that may be
2860 created to reference the call instruction must appear after it. */
2861 if ((debug_variable_location_views || call_insn != NULL)
2862 && !DECL_IGNORED_P (current_function_decl))
2863 debug_hooks->var_location (insn);
2865 current_output_insn = debug_insn = 0;
2868 return NEXT_INSN (insn);
2871 /* This is a wrapper around final_scan_insn_1 that allows ports to
2872 call it recursively without a known value for SEEN. The value is
2873 saved at the outermost call, and recovered for recursive calls.
2874 Recursive calls MUST pass NULL, or the same pointer if they can
2875 otherwise get to it. */
2877 rtx_insn *
2878 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p,
2879 int nopeepholes, int *seen)
2881 static int *enclosing_seen;
2882 static int recursion_counter;
2884 gcc_assert (seen || recursion_counter);
2885 gcc_assert (!recursion_counter || !seen || seen == enclosing_seen);
2887 if (!recursion_counter++)
2888 enclosing_seen = seen;
2889 else if (!seen)
2890 seen = enclosing_seen;
2892 rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen);
2894 if (!--recursion_counter)
2895 enclosing_seen = NULL;
2897 return ret;
2902 /* Map DECLs to instance discriminators. This is allocated and
2903 defined in ada/gcc-interface/trans.cc, when compiling with -gnateS.
2904 Mappings from this table are saved and restored for LTO, so
2905 link-time compilation will have this map set, at least in
2906 partitions containing at least one DECL with an associated instance
2907 discriminator. */
2909 decl_to_instance_map_t *decl_to_instance_map;
2911 /* Return the instance number assigned to DECL. */
2913 static inline int
2914 map_decl_to_instance (const_tree decl)
2916 int *inst;
2918 if (!decl_to_instance_map || !decl || !DECL_P (decl))
2919 return 0;
2921 inst = decl_to_instance_map->get (decl);
2923 if (!inst)
2924 return 0;
2926 return *inst;
2929 /* Return the discriminator appropriate for location LOC, possibly derived from the instance map. */
2931 static inline int
2932 compute_discriminator (location_t loc)
2934 int discriminator;
2936 if (!decl_to_instance_map)
2937 discriminator = get_discriminator_from_loc (loc);
2938 else
2940 tree block = LOCATION_BLOCK (loc);
2942 while (block && TREE_CODE (block) == BLOCK
2943 && !inlined_function_outer_scope_p (block))
2944 block = BLOCK_SUPERCONTEXT (block);
2946 tree decl;
2948 if (!block)
2949 decl = current_function_decl;
2950 else if (DECL_P (block))
2951 decl = block;
2952 else
2953 decl = block_ultimate_origin (block);
2955 discriminator = map_decl_to_instance (decl);
2958 return discriminator;
2961 /* Return discriminator of the statement that produced this insn. */
2963 insn_discriminator (const rtx_insn *insn)
2965 return compute_discriminator (INSN_LOCATION (insn));
2968 /* Return whether a source line note needs to be emitted before INSN.
2969 Sets IS_STMT to TRUE if the line should be marked as a possible
2970 breakpoint location. */
2972 static bool
2973 notice_source_line (rtx_insn *insn, bool *is_stmt)
2975 const char *filename;
2976 int linenum, columnnum;
2977 int discriminator;
2979 if (NOTE_MARKER_P (insn))
2981 location_t loc = NOTE_MARKER_LOCATION (insn);
2982 expanded_location xloc = expand_location (loc);
2983 if (xloc.line == 0
2984 && (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION
2985 || LOCATION_LOCUS (loc) == BUILTINS_LOCATION))
2986 return false;
2988 filename = xloc.file;
2989 linenum = xloc.line;
2990 columnnum = xloc.column;
2991 discriminator = compute_discriminator (loc);
2992 force_source_line = true;
2994 else if (override_filename)
2996 filename = override_filename;
2997 linenum = override_linenum;
2998 columnnum = override_columnnum;
2999 discriminator = override_discriminator;
3001 else if (INSN_HAS_LOCATION (insn))
3003 expanded_location xloc = insn_location (insn);
3004 filename = xloc.file;
3005 linenum = xloc.line;
3006 columnnum = xloc.column;
3007 discriminator = insn_discriminator (insn);
3009 else
3011 filename = NULL;
3012 linenum = 0;
3013 columnnum = 0;
3014 discriminator = 0;
3017 if (filename == NULL)
3018 return false;
3020 if (force_source_line
3021 || filename != last_filename
3022 || last_linenum != linenum
3023 || (debug_column_info && last_columnnum != columnnum))
3025 force_source_line = false;
3026 last_filename = filename;
3027 last_linenum = linenum;
3028 last_columnnum = columnnum;
3029 last_discriminator = discriminator;
3030 if (is_stmt)
3031 *is_stmt = true;
3032 high_block_linenum = MAX (last_linenum, high_block_linenum);
3033 high_function_linenum = MAX (last_linenum, high_function_linenum);
3034 return true;
3037 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3039 /* If the discriminator changed, but the line number did not,
3040 output the line table entry with is_stmt false so the
3041 debugger does not treat this as a breakpoint location. */
3042 last_discriminator = discriminator;
3043 if (is_stmt)
3044 *is_stmt = false;
3045 return true;
3048 return false;
3051 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3052 directly to the desired hard register. */
3054 void
3055 cleanup_subreg_operands (rtx_insn *insn)
3057 int i;
3058 bool changed = false;
3059 extract_insn_cached (insn);
3060 for (i = 0; i < recog_data.n_operands; i++)
3062 /* The following test cannot use recog_data.operand when testing
3063 for a SUBREG: the underlying object might have been changed
3064 already if we are inside a match_operator expression that
3065 matches the else clause. Instead we test the underlying
3066 expression directly. */
3067 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3069 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3070 changed = true;
3072 else if (GET_CODE (recog_data.operand[i]) == PLUS
3073 || GET_CODE (recog_data.operand[i]) == MULT
3074 || MEM_P (recog_data.operand[i]))
3075 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3078 for (i = 0; i < recog_data.n_dups; i++)
3080 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3082 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3083 changed = true;
3085 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3086 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3087 || MEM_P (*recog_data.dup_loc[i]))
3088 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3090 if (changed)
3091 df_insn_rescan (insn);
3094 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3095 the thing it is a subreg of. Do it anyway if FINAL_P. */
3098 alter_subreg (rtx *xp, bool final_p)
3100 rtx x = *xp;
3101 rtx y = SUBREG_REG (x);
3103 /* simplify_subreg does not remove subreg from volatile references.
3104 We are required to. */
3105 if (MEM_P (y))
3107 poly_int64 offset = SUBREG_BYTE (x);
3109 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3110 contains 0 instead of the proper offset. See simplify_subreg. */
3111 if (paradoxical_subreg_p (x))
3112 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3114 if (final_p)
3115 *xp = adjust_address (y, GET_MODE (x), offset);
3116 else
3117 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3119 else if (REG_P (y) && HARD_REGISTER_P (y))
3121 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3122 SUBREG_BYTE (x));
3124 if (new_rtx != 0)
3125 *xp = new_rtx;
3126 else if (final_p && REG_P (y))
3128 /* Simplify_subreg can't handle some REG cases, but we have to. */
3129 unsigned int regno;
3130 poly_int64 offset;
3132 regno = subreg_regno (x);
3133 if (subreg_lowpart_p (x))
3134 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3135 else
3136 offset = SUBREG_BYTE (x);
3137 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3141 return *xp;
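/* Two illustrative simplifications performed here (a sketch; the byte
   offsets depend on the modes and on endianness):

     (subreg:SI (mem:DI <addr>) 4)  ->  (mem:SI (plus <addr> (const_int 4)))
     (subreg:HI (reg:SI 0) 0)       ->  (reg:HI 0)
*/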
3144 /* Do alter_subreg on all the SUBREGs contained in X. */
3146 static rtx
3147 walk_alter_subreg (rtx *xp, bool *changed)
3149 rtx x = *xp;
3150 switch (GET_CODE (x))
3152 case PLUS:
3153 case MULT:
3154 case AND:
3155 case ASHIFT:
3156 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3157 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3158 break;
3160 case MEM:
3161 case ZERO_EXTEND:
3162 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3163 break;
3165 case SUBREG:
3166 *changed = true;
3167 return alter_subreg (xp, true);
3169 default:
3170 break;
3173 return *xp;
3176 /* Report inconsistency between the assembler template and the operands.
3177 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3179 void
3180 output_operand_lossage (const char *cmsgid, ...)
3182 char *fmt_string;
3183 char *new_message;
3184 const char *pfx_str;
3185 va_list ap;
3187 va_start (ap, cmsgid);
3189 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3190 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3191 new_message = xvasprintf (fmt_string, ap);
3193 if (this_is_asm_operands)
3194 error_for_asm (this_is_asm_operands, "%s", new_message);
3195 else
3196 internal_error ("%s", new_message);
3198 free (fmt_string);
3199 free (new_message);
3200 va_end (ap);
3203 /* Output of assembler code from a template, and its subroutines. */
3205 /* Annotate the assembly with a comment describing the pattern and
3206 alternative used. */
3208 static void
3209 output_asm_name (void)
3211 if (debug_insn)
3213 fprintf (asm_out_file, "\t%s %d\t",
3214 ASM_COMMENT_START, INSN_UID (debug_insn));
3216 fprintf (asm_out_file, "[c=%d",
3217 insn_cost (debug_insn, optimize_insn_for_speed_p ()));
3218 if (HAVE_ATTR_length)
3219 fprintf (asm_out_file, " l=%d",
3220 get_attr_length (debug_insn));
3221 fprintf (asm_out_file, "] ");
3223 int num = INSN_CODE (debug_insn);
3224 fprintf (asm_out_file, "%s", insn_data[num].name);
3225 if (insn_data[num].n_alternatives > 1)
3226 fprintf (asm_out_file, "/%d", which_alternative);
3228 /* Clear this so only the first assembler insn
3229 of any rtl insn will get the special comment for -dp. */
3230 debug_insn = 0;
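/* With -dp / -fprint-asm-name the first assembler line of each insn gets a
   trailing comment roughly of this shape (a sketch; the cost, length and
   pattern name come from the insn actually matched):

	movl	%edx, %eax	# 23	[c=4 l=5]  *movsi_internal/1
*/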
3234 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3235 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3236 corresponds to the address of the object and 0 if to the object. */
3238 static tree
3239 get_mem_expr_from_op (rtx op, int *paddressp)
3241 tree expr;
3242 int inner_addressp;
3244 *paddressp = 0;
3246 if (REG_P (op))
3247 return REG_EXPR (op);
3248 else if (!MEM_P (op))
3249 return 0;
3251 if (MEM_EXPR (op) != 0)
3252 return MEM_EXPR (op);
3254 /* Otherwise we have an address, so indicate it and look at the address. */
3255 *paddressp = 1;
3256 op = XEXP (op, 0);
3258 /* First check if we have a decl for the address, then look at the right side
3259 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3260 But don't allow the address to itself be indirect. */
3261 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3262 return expr;
3263 else if (GET_CODE (op) == PLUS
3264 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3265 return expr;
3267 while (UNARY_P (op)
3268 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3269 op = XEXP (op, 0);
3271 expr = get_mem_expr_from_op (op, &inner_addressp);
3272 return inner_addressp ? 0 : expr;
3275 /* Output operand names for assembler instructions. OPERANDS is the
3276 operand vector, OPORDER is the order to write the operands, and NOPS
3277 is the number of operands to write. */
3279 static void
3280 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3282 int wrote = 0;
3283 int i;
3285 for (i = 0; i < nops; i++)
3287 int addressp;
3288 rtx op = operands[oporder[i]];
3289 tree expr = get_mem_expr_from_op (op, &addressp);
3291 fprintf (asm_out_file, "%c%s",
3292 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3293 wrote = 1;
3294 if (expr)
3296 fprintf (asm_out_file, "%s",
3297 addressp ? "*" : "");
3298 print_mem_expr (asm_out_file, expr);
3299 wrote = 1;
3301 else if (REG_P (op) && ORIGINAL_REGNO (op)
3302 && ORIGINAL_REGNO (op) != REGNO (op))
3303 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3307 #ifdef ASSEMBLER_DIALECT
3308 /* Helper function to parse assembler dialects in the asm string.
3309 This is called from output_asm_insn and asm_fprintf. */
3310 static const char *
3311 do_assembler_dialects (const char *p, int *dialect)
3313 char c = *(p - 1);
3315 switch (c)
3317 case '{':
3319 int i;
3321 if (*dialect)
3322 output_operand_lossage ("nested assembly dialect alternatives");
3323 else
3324 *dialect = 1;
3326 /* If we want the first dialect, do nothing. Otherwise, skip
3327 DIALECT_NUMBER of strings ending with '|'. */
3328 for (i = 0; i < dialect_number; i++)
3330 while (*p && *p != '}')
3332 if (*p == '|')
3334 p++;
3335 break;
3338 /* Skip over any character after a percent sign. */
3339 if (*p == '%')
3340 p++;
3341 if (*p)
3342 p++;
3345 if (*p == '}')
3346 break;
3349 if (*p == '\0')
3350 output_operand_lossage ("unterminated assembly dialect alternative");
3352 break;
3354 case '|':
3355 if (*dialect)
3357 /* Skip to close brace. */
3360 if (*p == '\0')
3362 output_operand_lossage ("unterminated assembly dialect alternative");
3363 break;
3366 /* Skip over any character after a percent sign. */
3367 if (*p == '%' && p[1])
3369 p += 2;
3370 continue;
3373 if (*p++ == '}')
3374 break;
3376 while (1);
3378 *dialect = 0;
3380 else
3381 putc (c, asm_out_file);
3382 break;
3384 case '}':
3385 if (! *dialect)
3386 putc (c, asm_out_file);
3387 *dialect = 0;
3388 break;
3389 default:
3390 gcc_unreachable ();
3393 return p;
3395 #endif
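/* For example, with ASSEMBLER_DIALECT a template can spell an insn once per
   dialect and the unselected branches are skipped here (a sketch in the
   classic AT&T-vs-Intel style):

     "mov{l}\t{%1, %0|%0, %1}"

   prints "movl\t%1, %0" when dialect 0 is selected and "mov\t%0, %1" when
   dialect 1 is selected.  */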
3397 /* Output text from TEMPLATE to the assembler output file,
3398 obeying %-directions to substitute operands taken from
3399 the vector OPERANDS.
3401 %N (for N a digit) means print operand N in usual manner.
3402 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3403 and print the label name with no punctuation.
3404 %cN means require operand N to be a constant
3405 and print the constant expression with no punctuation.
3406 %aN means expect operand N to be a memory address
3407 (not a memory reference!) and print a reference
3408 to that address.
3409 %nN means expect operand N to be a constant
3410 and print a constant expression for minus the value
3411 of the operand, with no other punctuation. */
3413 void
3414 output_asm_insn (const char *templ, rtx *operands)
3416 const char *p;
3417 int c;
3418 #ifdef ASSEMBLER_DIALECT
3419 int dialect = 0;
3420 #endif
3421 int oporder[MAX_RECOG_OPERANDS];
3422 char opoutput[MAX_RECOG_OPERANDS];
3423 int ops = 0;
3425 /* An insn may return a null string template
3426 in a case where no assembler code is needed. */
3427 if (*templ == 0)
3428 return;
3430 memset (opoutput, 0, sizeof opoutput);
3431 p = templ;
3432 putc ('\t', asm_out_file);
3434 #ifdef ASM_OUTPUT_OPCODE
3435 ASM_OUTPUT_OPCODE (asm_out_file, p);
3436 #endif
3438 while ((c = *p++))
3439 switch (c)
3441 case '\n':
3442 if (flag_verbose_asm)
3443 output_asm_operand_names (operands, oporder, ops);
3444 if (flag_print_asm_name)
3445 output_asm_name ();
3447 ops = 0;
3448 memset (opoutput, 0, sizeof opoutput);
3450 putc (c, asm_out_file);
3451 #ifdef ASM_OUTPUT_OPCODE
3452 while ((c = *p) == '\t')
3454 putc (c, asm_out_file);
3455 p++;
3457 ASM_OUTPUT_OPCODE (asm_out_file, p);
3458 #endif
3459 break;
3461 #ifdef ASSEMBLER_DIALECT
3462 case '{':
3463 case '}':
3464 case '|':
3465 p = do_assembler_dialects (p, &dialect);
3466 break;
3467 #endif
3469 case '%':
3470 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3471 if ASSEMBLER_DIALECT is defined and these characters have a special
3472 meaning as dialect delimiters. */
3473 if (*p == '%'
3474 #ifdef ASSEMBLER_DIALECT
3475 || *p == '{' || *p == '}' || *p == '|'
3476 #endif
3479 putc (*p, asm_out_file);
3480 p++;
3482 /* %= outputs a number which is unique to each insn in the entire
3483 compilation. This is useful for making local labels that are
3484 referred to more than once in a given insn. */
3485 else if (*p == '=')
3487 p++;
3488 fprintf (asm_out_file, "%d", insn_counter);
3490 /* % followed by a letter and some digits
3491 outputs an operand in a special way depending on the letter.
3492 Letters `acln' are implemented directly.
3493 Other letters are passed to `output_operand' so that
3494 the TARGET_PRINT_OPERAND hook can define them. */
3495 else if (ISALPHA (*p))
3497 int letter = *p++;
3498 unsigned long opnum;
3499 char *endptr;
3500 int letter2 = 0;
3502 if (letter == 'c' && *p == 'c')
3503 letter2 = *p++;
3504 opnum = strtoul (p, &endptr, 10);
3506 if (endptr == p)
3507 output_operand_lossage ("operand number missing "
3508 "after %%-letter");
3509 else if (this_is_asm_operands && opnum >= insn_noperands)
3510 output_operand_lossage ("operand number out of range");
3511 else if (letter == 'l')
3512 output_asm_label (operands[opnum]);
3513 else if (letter == 'a')
3514 output_address (VOIDmode, operands[opnum]);
3515 else if (letter == 'c')
3517 if (letter2 == 'c' || CONSTANT_ADDRESS_P (operands[opnum]))
3518 output_addr_const (asm_out_file, operands[opnum]);
3519 else
3520 output_operand (operands[opnum], 'c');
3522 else if (letter == 'n')
3524 if (CONST_INT_P (operands[opnum]))
3525 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3526 - INTVAL (operands[opnum]));
3527 else
3529 putc ('-', asm_out_file);
3530 output_addr_const (asm_out_file, operands[opnum]);
3533 else
3534 output_operand (operands[opnum], letter);
3536 if (!opoutput[opnum])
3537 oporder[ops++] = opnum;
3538 opoutput[opnum] = 1;
3540 p = endptr;
3541 c = *p;
3543 /* % followed by a digit outputs an operand the default way. */
3544 else if (ISDIGIT (*p))
3546 unsigned long opnum;
3547 char *endptr;
3549 opnum = strtoul (p, &endptr, 10);
3550 if (this_is_asm_operands && opnum >= insn_noperands)
3551 output_operand_lossage ("operand number out of range");
3552 else
3553 output_operand (operands[opnum], 0);
3555 if (!opoutput[opnum])
3556 oporder[ops++] = opnum;
3557 opoutput[opnum] = 1;
3559 p = endptr;
3560 c = *p;
3562 /* % followed by punctuation: output something for that
3563 punctuation character alone, with no operand. The
3564 TARGET_PRINT_OPERAND hook decides what is actually done. */
3565 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3566 output_operand (NULL_RTX, *p++);
3567 else
3568 output_operand_lossage ("invalid %%-code");
3569 break;
3571 default:
3572 putc (c, asm_out_file);
3575 /* Try to keep the asm a bit more readable. */
3576 if ((flag_verbose_asm || flag_print_asm_name) && strlen (templ) < 9)
3577 putc ('\t', asm_out_file);
3579 /* Write out the variable names for operands, if we know them. */
3580 if (flag_verbose_asm)
3581 output_asm_operand_names (operands, oporder, ops);
3582 if (flag_print_asm_name)
3583 output_asm_name ();
3585 putc ('\n', asm_out_file);
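/* A minimal usage sketch from a target's output code (the operands and the
   template string are illustrative):

     rtx ops[2] = { dest, src };
     output_asm_insn ("mov\t%1, %0", ops);

   "%=" expands to a number unique to the current insn, which templates use
   to build local labels that are referenced more than once within the same
   insn.  */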
3588 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3590 void
3591 output_asm_label (rtx x)
3593 char buf[256];
3595 if (GET_CODE (x) == LABEL_REF)
3596 x = label_ref_label (x);
3597 if (LABEL_P (x)
3598 || (NOTE_P (x)
3599 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3600 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3601 else
3602 output_operand_lossage ("'%%l' operand isn't a label");
3604 assemble_name (asm_out_file, buf);
3607 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3609 void
3610 mark_symbol_refs_as_used (rtx x)
3612 subrtx_iterator::array_type array;
3613 FOR_EACH_SUBRTX (iter, array, x, ALL)
3615 const_rtx x = *iter;
3616 if (GET_CODE (x) == SYMBOL_REF)
3617 if (tree t = SYMBOL_REF_DECL (x))
3618 assemble_external (t);
3622 /* Print operand X using machine-dependent assembler syntax.
3623 CODE is a non-digit that preceded the operand-number in the % spec,
3624 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3625 between the % and the digits.
3626 When CODE is a non-letter, X is 0.
3628 The meanings of the letters are machine-dependent and controlled
3629 by TARGET_PRINT_OPERAND. */
3631 void
3632 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3634 if (x && GET_CODE (x) == SUBREG)
3635 x = alter_subreg (&x, true);
3637 /* X must not be a pseudo reg. */
3638 if (!targetm.no_register_allocation)
3639 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3641 targetm.asm_out.print_operand (asm_out_file, x, code);
3643 if (x == NULL_RTX)
3644 return;
3646 mark_symbol_refs_as_used (x);
3649 /* Print a memory reference operand for address X using
3650 machine-dependent assembler syntax. */
3652 void
3653 output_address (machine_mode mode, rtx x)
3655 bool changed = false;
3656 walk_alter_subreg (&x, &changed);
3657 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3660 /* Print an integer constant expression in assembler syntax.
3661 Addition and subtraction are the only arithmetic
3662 that may appear in these expressions. */
3664 void
3665 output_addr_const (FILE *file, rtx x)
3667 char buf[256];
3669 restart:
3670 switch (GET_CODE (x))
3672 case PC:
3673 putc ('.', file);
3674 break;
3676 case SYMBOL_REF:
3677 if (SYMBOL_REF_DECL (x))
3678 assemble_external (SYMBOL_REF_DECL (x));
3679 #ifdef ASM_OUTPUT_SYMBOL_REF
3680 ASM_OUTPUT_SYMBOL_REF (file, x);
3681 #else
3682 assemble_name (file, XSTR (x, 0));
3683 #endif
3684 break;
3686 case LABEL_REF:
3687 x = label_ref_label (x);
3688 /* Fall through. */
3689 case CODE_LABEL:
3690 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3691 #ifdef ASM_OUTPUT_LABEL_REF
3692 ASM_OUTPUT_LABEL_REF (file, buf);
3693 #else
3694 assemble_name (file, buf);
3695 #endif
3696 break;
3698 case CONST_INT:
3699 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3700 break;
3702 case CONST:
3703 /* This used to output parentheses around the expression,
3704 but that does not work on the 386 (either ATT or BSD assembler). */
3705 output_addr_const (file, XEXP (x, 0));
3706 break;
3708 case CONST_WIDE_INT:
3709 /* We do not know the mode here so we have to use a roundabout
3710 way to build a wide-int to get it printed properly. */
3712 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3713 CONST_WIDE_INT_NUNITS (x),
3714 CONST_WIDE_INT_NUNITS (x)
3715 * HOST_BITS_PER_WIDE_INT,
3716 false);
3717 print_decs (w, file);
3719 break;
3721 case CONST_DOUBLE:
3722 if (CONST_DOUBLE_AS_INT_P (x))
3724 /* We can use %d if the number is one word and positive. */
3725 if (CONST_DOUBLE_HIGH (x))
3726 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3727 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3728 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3729 else if (CONST_DOUBLE_LOW (x) < 0)
3730 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3731 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3732 else
3733 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3735 else
3736 /* We can't handle floating point constants;
3737 PRINT_OPERAND must handle them. */
3738 output_operand_lossage ("floating constant misused");
3739 break;
3741 case CONST_FIXED:
3742 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3743 break;
3745 case PLUS:
3746 /* Some assemblers need integer constants to appear last (e.g. masm). */
3747 if (CONST_INT_P (XEXP (x, 0)))
3749 output_addr_const (file, XEXP (x, 1));
3750 if (INTVAL (XEXP (x, 0)) >= 0)
3751 fprintf (file, "+");
3752 output_addr_const (file, XEXP (x, 0));
3754 else
3756 output_addr_const (file, XEXP (x, 0));
3757 if (!CONST_INT_P (XEXP (x, 1))
3758 || INTVAL (XEXP (x, 1)) >= 0)
3759 fprintf (file, "+");
3760 output_addr_const (file, XEXP (x, 1));
3762 break;
3764 case MINUS:
3765 /* Avoid outputting things like x-x or x+5-x,
3766 since some assemblers can't handle that. */
3767 x = simplify_subtraction (x);
3768 if (GET_CODE (x) != MINUS)
3769 goto restart;
3771 output_addr_const (file, XEXP (x, 0));
3772 fprintf (file, "-");
3773 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3774 || GET_CODE (XEXP (x, 1)) == PC
3775 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3776 output_addr_const (file, XEXP (x, 1));
3777 else
3779 fputs (targetm.asm_out.open_paren, file);
3780 output_addr_const (file, XEXP (x, 1));
3781 fputs (targetm.asm_out.close_paren, file);
3783 break;
3785 case ZERO_EXTEND:
3786 case SIGN_EXTEND:
3787 case SUBREG:
3788 case TRUNCATE:
3789 output_addr_const (file, XEXP (x, 0));
3790 break;
3792 default:
3793 if (targetm.asm_out.output_addr_const_extra (file, x))
3794 break;
3796 output_operand_lossage ("invalid expression as operand");
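/* Illustrative translations (a sketch; the exact spelling can be changed by
   the ASM_OUTPUT_* hooks above):

     (symbol_ref "x")                               ->  x
     (const (plus (symbol_ref "x") (const_int 8)))  ->  x+8
     (minus (label_ref 25) (label_ref 24))          ->  .L25-.L24
*/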
3800 /* Output a quoted string. */
3802 void
3803 output_quoted_string (FILE *asm_file, const char *string)
3805 #ifdef OUTPUT_QUOTED_STRING
3806 OUTPUT_QUOTED_STRING (asm_file, string);
3807 #else
3808 char c;
3810 putc ('\"', asm_file);
3811 while ((c = *string++) != 0)
3813 if (ISPRINT (c))
3815 if (c == '\"' || c == '\\')
3816 putc ('\\', asm_file);
3817 putc (c, asm_file);
3819 else
3820 fprintf (asm_file, "\\%03o", (unsigned char) c);
3822 putc ('\"', asm_file);
3823 #endif
3826 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3828 void
3829 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3831 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3832 if (value == 0)
3833 putc ('0', f);
3834 else
3836 char *p = buf + sizeof (buf);
3838 *--p = "0123456789abcdef"[value % 16];
3839 while ((value /= 16) != 0);
3840 *--p = 'x';
3841 *--p = '0';
3842 fwrite (p, 1, buf + sizeof (buf) - p, f);
3846 /* Internal function that prints an unsigned long in decimal in reverse.
3847 The output string IS NOT null-terminated. */
3849 static int
3850 sprint_ul_rev (char *s, unsigned long value)
3852 int i = 0;
3855 s[i] = "0123456789"[value % 10];
3856 value /= 10;
3857 i++;
3858 /* alternate version, without modulo */
3859 /* oldval = value; */
3860 /* value /= 10; */
3861 /* s[i] = "0123456789" [oldval - 10*value]; */
3862 /* i++ */
3864 while (value != 0);
3865 return i;
3868 /* Write an unsigned long as decimal to a file, fast. */
3870 void
3871 fprint_ul (FILE *f, unsigned long value)
3873 /* python says: len(str(2**64)) == 20 */
3874 char s[20];
3875 int i;
3877 i = sprint_ul_rev (s, value);
3879 /* It's probably too small to bother with string reversal and fputs. */
3882 i--;
3883 putc (s[i], f);
3885 while (i != 0);
3888 /* Write an unsigned long as decimal to a string, fast.
3889 s must be wide enough to not overflow, at least 21 chars.
3890 Returns the length of the string (without terminating '\0'). */
3893 sprint_ul (char *s, unsigned long value)
3895 int len = sprint_ul_rev (s, value);
3896 s[len] = '\0';
3898 std::reverse (s, s + len);
3899 return len;
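/* A minimal usage sketch:

     char buf[21];
     int len = sprint_ul (buf, 12345UL);   // buf holds "12345", len == 5
*/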
3902 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3903 %R prints the value of REGISTER_PREFIX.
3904 %L prints the value of LOCAL_LABEL_PREFIX.
3905 %U prints the value of USER_LABEL_PREFIX.
3906 %I prints the value of IMMEDIATE_PREFIX.
3907 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3908 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3910 We handle alternate assembler dialects here, just like output_asm_insn. */
3912 void
3913 asm_fprintf (FILE *file, const char *p, ...)
3915 char buf[10];
3916 char *q, c;
3917 #ifdef ASSEMBLER_DIALECT
3918 int dialect = 0;
3919 #endif
3920 va_list argptr;
3922 va_start (argptr, p);
3924 buf[0] = '%';
3926 while ((c = *p++))
3927 switch (c)
3929 #ifdef ASSEMBLER_DIALECT
3930 case '{':
3931 case '}':
3932 case '|':
3933 p = do_assembler_dialects (p, &dialect);
3934 break;
3935 #endif
3937 case '%':
3938 c = *p++;
3939 q = &buf[1];
3940 while (strchr ("-+ #0", c))
3942 *q++ = c;
3943 c = *p++;
3945 while (ISDIGIT (c) || c == '.')
3947 *q++ = c;
3948 c = *p++;
3950 switch (c)
3952 case '%':
3953 putc ('%', file);
3954 break;
3956 case 'd': case 'i': case 'u':
3957 case 'x': case 'X': case 'o':
3958 case 'c':
3959 *q++ = c;
3960 *q = 0;
3961 fprintf (file, buf, va_arg (argptr, int));
3962 break;
3964 case 'w':
3965 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3966 'o' cases, but we do not check for those cases. It
3967 means that the value is a HOST_WIDE_INT, which may be
3968 either `long' or `long long'. */
3969 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3970 q += strlen (HOST_WIDE_INT_PRINT);
3971 *q++ = *p++;
3972 *q = 0;
3973 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3974 break;
3976 case 'l':
3977 *q++ = c;
3978 #ifdef HAVE_LONG_LONG
3979 if (*p == 'l')
3981 *q++ = *p++;
3982 *q++ = *p++;
3983 *q = 0;
3984 fprintf (file, buf, va_arg (argptr, long long));
3986 else
3987 #endif
3989 *q++ = *p++;
3990 *q = 0;
3991 fprintf (file, buf, va_arg (argptr, long));
3994 break;
3996 case 's':
3997 *q++ = c;
3998 *q = 0;
3999 fprintf (file, buf, va_arg (argptr, char *));
4000 break;
4002 case 'O':
4003 #ifdef ASM_OUTPUT_OPCODE
4004 ASM_OUTPUT_OPCODE (asm_out_file, p);
4005 #endif
4006 break;
4008 case 'R':
4009 #ifdef REGISTER_PREFIX
4010 fprintf (file, "%s", REGISTER_PREFIX);
4011 #endif
4012 break;
4014 case 'I':
4015 #ifdef IMMEDIATE_PREFIX
4016 fprintf (file, "%s", IMMEDIATE_PREFIX);
4017 #endif
4018 break;
4020 case 'L':
4021 #ifdef LOCAL_LABEL_PREFIX
4022 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4023 #endif
4024 break;
4026 case 'U':
4027 fputs (user_label_prefix, file);
4028 break;
4030 #ifdef ASM_FPRINTF_EXTENSIONS
4031 /* Uppercase letters are reserved for general use by asm_fprintf
4032 and so are not available to target specific code. In order to
4033 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4034 they are defined here. As they get turned into real extensions
4035 to asm_fprintf they should be removed from this list. */
4036 case 'A': case 'B': case 'C': case 'D': case 'E':
4037 case 'F': case 'G': case 'H': case 'J': case 'K':
4038 case 'M': case 'N': case 'P': case 'Q': case 'S':
4039 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4040 break;
4042 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4043 #endif
4044 default:
4045 gcc_unreachable ();
4047 break;
4049 default:
4050 putc (c, file);
4052 va_end (argptr);
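/* A usage sketch (FILE, REGNO and OFFSET, a HOST_WIDE_INT, are illustrative;
   reg_names is the usual register-name table):

     asm_fprintf (file, "\tadd\t%R%s, %I%wd\n", reg_names[regno], offset);

   On a target whose REGISTER_PREFIX is "%" and IMMEDIATE_PREFIX is "$" this
   prints e.g. "\tadd\t%eax, $16\n"; %wd formats a HOST_WIDE_INT in decimal.  */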
4055 /* Return true if this function has no function calls. */
4057 bool
4058 leaf_function_p (void)
4060 rtx_insn *insn;
4062 /* Ensure we walk the entire function body. */
4063 gcc_assert (!in_sequence_p ());
4065 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4066 functions even if they call mcount. */
4067 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4068 return false;
4070 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4072 if (CALL_P (insn)
4073 && ! SIBLING_CALL_P (insn)
4074 && ! FAKE_CALL_P (insn))
4075 return false;
4076 if (NONJUMP_INSN_P (insn)
4077 && GET_CODE (PATTERN (insn)) == SEQUENCE
4078 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4079 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4080 return false;
4083 return true;
4086 /* Return true if branch is a forward branch.
4087 Uses insn_shuid array, so it works only in the final pass. May be used by
4088 output templates to customarily add branch prediction hints.
4090 bool
4091 final_forward_branch_p (rtx_insn *insn)
4093 int insn_id, label_id;
4095 gcc_assert (uid_shuid);
4096 insn_id = INSN_SHUID (insn);
4097 label_id = INSN_SHUID (JUMP_LABEL (insn));
4098 /* We've hit some insns that do not have id information available. */
4099 gcc_assert (insn_id && label_id);
4100 return insn_id < label_id;
4101 }
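/* Illustrative use (not from this file): an output template on a target
   with static branch-prediction suffixes might pick the hint from the
   branch direction, e.g. final_forward_branch_p (insn) ? ",pn" : ",pt".  */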
4103 /* On some machines, a function with no call insns
4104 can run faster if it doesn't create its own register window.
4105 When output, the leaf function should use only the "output"
4106 registers. Ordinarily, the function would be compiled to use
4107 the "input" registers to find its arguments; it is a candidate
4108 for leaf treatment if it uses only the "input" registers.
4109 Leaf function treatment means renumbering so the function
4110 uses the "output" registers instead. */
4112 #ifdef LEAF_REGISTERS
4114 /* Return true if this function uses only the registers that can be
4115 safely renumbered. */
4117 bool
4118 only_leaf_regs_used (void)
4119 {
4120 int i;
4121 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4123 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4124 if ((df_regs_ever_live_p (i) || global_regs[i])
4125 && ! permitted_reg_in_leaf_functions[i])
4126 return false;
4128 if (crtl->uses_pic_offset_table
4129 && pic_offset_table_rtx != 0
4130 && REG_P (pic_offset_table_rtx)
4131 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4132 return false;
4134 return true;
4135 }
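/* LEAF_REGISTERS is supplied by the target as an array indexed by hard
   register number; the function above refuses leaf treatment if any
   live hard register (or the PIC register, when used) is not marked as
   permitted in that array.  */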
4137 /* Scan all instructions and renumber all registers into those
4138 available in leaf functions. */
4140 static void
4141 leaf_renumber_regs (rtx_insn *first)
4142 {
4143 rtx_insn *insn;
4145 /* Renumber only the actual patterns.
4146 The reg-notes can contain frame pointer refs,
4147 and renumbering them could crash, and should not be needed. */
4148 for (insn = first; insn; insn = NEXT_INSN (insn))
4149 if (INSN_P (insn))
4150 leaf_renumber_regs_insn (PATTERN (insn));
4151 }
4153 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4154 available in leaf functions. */
4156 void
4157 leaf_renumber_regs_insn (rtx in_rtx)
4158 {
4159 int i, j;
4160 const char *format_ptr;
4162 if (in_rtx == 0)
4163 return;
4165 /* Renumber all input registers into output registers. */
4169 if (REG_P (in_rtx))
4170 {
4171 int newreg;
4173 /* Don't renumber the same reg twice. */
4174 if (in_rtx->used)
4175 return;
4177 newreg = REGNO (in_rtx);
4178 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4179 to reach here as part of a REG_NOTE. */
4180 if (newreg >= FIRST_PSEUDO_REGISTER)
4181 {
4182 in_rtx->used = 1;
4183 return;
4184 }
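/* LEAF_REG_REMAP comes from the target together with LEAF_REGISTERS; on a
   register-window target such as SPARC it maps an "input" register to the
   corresponding "output" register, and yields a negative value for
   registers that cannot be remapped, which the assert below rejects.  */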
4185 newreg = LEAF_REG_REMAP (newreg);
4186 gcc_assert (newreg >= 0);
4187 df_set_regs_ever_live (REGNO (in_rtx), false);
4188 df_set_regs_ever_live (newreg, true);
4189 SET_REGNO (in_rtx, newreg);
4190 in_rtx->used = 1;
4191 return;
4192 }
4194 if (INSN_P (in_rtx))
4195 {
4196 /* Inside a SEQUENCE, we find insns.
4197 Renumber just the patterns of these insns,
4198 just as we do for the top-level insns. */
4199 leaf_renumber_regs_insn (PATTERN (in_rtx));
4200 return;
4201 }
4203 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4205 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4206 switch (*format_ptr++)
4207 {
4208 case 'e':
4209 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4210 break;
4212 case 'E':
4213 if (XVEC (in_rtx, i) != NULL)
4214 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4215 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4216 break;
4218 case 'S':
4219 case 's':
4220 case '0':
4221 case 'i':
4222 case 'L':
4223 case 'w':
4224 case 'p':
4225 case 'n':
4226 case 'u':
4227 break;
4229 default:
4230 gcc_unreachable ();
4231 }
4232 }
4233 #endif
4235 /* Turn the RTL into assembly. */
4236 static unsigned int
4237 rest_of_handle_final (void)
4238 {
4239 const char *fnname = get_fnname_from_decl (current_function_decl);
4241 /* Turn debug markers into notes if the var-tracking pass has not
4242 been invoked. */
4243 if (!flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNS)
4244 delete_vta_debug_insns (false);
4246 assemble_start_function (current_function_decl, fnname);
4247 rtx_insn *first = get_insns ();
4248 int seen = 0;
4249 final_start_function_1 (&first, asm_out_file, &seen, optimize);
4250 final_1 (first, asm_out_file, seen, optimize);
4251 if (flag_ipa_ra
4252 && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl))
4253 /* Functions with the naked attribute are supported only with basic asm
4254 statements in the body; thus, for the supported use cases, the information
4255 on clobbered registers is not available. */
4256 && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)))
4257 collect_fn_hard_reg_usage ();
4258 final_end_function ();
4260 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4261 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4262 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4263 output_function_exception_table (crtl->has_bb_partition ? 1 : 0);
4265 assemble_end_function (current_function_decl, fnname);
4267 /* Free up reg info memory. */
4268 free_reg_info ();
4270 if (! quiet_flag)
4271 fflush (asm_out_file);
4273 /* Note that for those inline functions where we don't initially
4274 know for certain that we will be generating an out-of-line copy,
4275 the first invocation of this routine (rest_of_compilation) will
4276 skip over this code by doing a `goto exit_rest_of_compilation;'.
4277 Later on, wrapup_global_declarations will (indirectly) call
4278 rest_of_compilation again for those inline functions that need
4279 to have out-of-line copies generated. During that call, we
4280 *will* be routed past here. */
4282 timevar_push (TV_SYMOUT);
4283 if (!DECL_IGNORED_P (current_function_decl))
4284 debug_hooks->function_decl (current_function_decl);
4285 timevar_pop (TV_SYMOUT);
4287 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4288 DECL_INITIAL (current_function_decl) = error_mark_node;
4290 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4291 && targetm.have_ctors_dtors)
4292 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4293 decl_init_priority_lookup
4294 (current_function_decl));
4295 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4296 && targetm.have_ctors_dtors)
4297 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4298 decl_fini_priority_lookup
4299 (current_function_decl));
4300 return 0;
4301 }
4303 namespace {
4305 const pass_data pass_data_final =
4306 {
4307 RTL_PASS, /* type */
4308 "final", /* name */
4309 OPTGROUP_NONE, /* optinfo_flags */
4310 TV_FINAL, /* tv_id */
4311 0, /* properties_required */
4312 0, /* properties_provided */
4313 0, /* properties_destroyed */
4314 0, /* todo_flags_start */
4315 0, /* todo_flags_finish */
4316 };
4318 class pass_final : public rtl_opt_pass
4319 {
4320 public:
4321 pass_final (gcc::context *ctxt)
4322 : rtl_opt_pass (pass_data_final, ctxt)
4323 {}
4325 /* opt_pass methods: */
4326 unsigned int execute (function *) final override
4327 {
4328 return rest_of_handle_final ();
4329 }
4331 }; // class pass_final
4333 } // anon namespace
4335 rtl_opt_pass *
4336 make_pass_final (gcc::context *ctxt)
4337 {
4338 return new pass_final (ctxt);
4339 }
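/* The pass manager creates this pass through make_pass_final, declared in
   tree-pass.h; its place in the pass pipeline is given by passes.def.  */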
4342 static unsigned int
4343 rest_of_handle_shorten_branches (void)
4344 {
4345 /* Shorten branches. */
4346 shorten_branches (get_insns ());
4347 return 0;
4348 }
4350 namespace {
4352 const pass_data pass_data_shorten_branches =
4353 {
4354 RTL_PASS, /* type */
4355 "shorten", /* name */
4356 OPTGROUP_NONE, /* optinfo_flags */
4357 TV_SHORTEN_BRANCH, /* tv_id */
4358 0, /* properties_required */
4359 0, /* properties_provided */
4360 0, /* properties_destroyed */
4361 0, /* todo_flags_start */
4362 0, /* todo_flags_finish */
4363 };
4365 class pass_shorten_branches : public rtl_opt_pass
4366 {
4367 public:
4368 pass_shorten_branches (gcc::context *ctxt)
4369 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4370 {}
4372 /* opt_pass methods: */
4373 unsigned int execute (function *) final override
4374 {
4375 return rest_of_handle_shorten_branches ();
4376 }
4378 }; // class pass_shorten_branches
4380 } // anon namespace
4382 rtl_opt_pass *
4383 make_pass_shorten_branches (gcc::context *ctxt)
4384 {
4385 return new pass_shorten_branches (ctxt);
4386 }
4389 static unsigned int
4390 rest_of_clean_state (void)
4391 {
4392 rtx_insn *insn, *next;
4393 FILE *final_output = NULL;
4394 int save_unnumbered = flag_dump_unnumbered;
4395 int save_noaddr = flag_dump_noaddr;
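/* flag_dump_final_insns is set by -fdump-final-insns=FILE; -fcompare-debug
   uses this dump (with addresses and UIDs suppressed below) to verify that
   compiling with and without debug information produces the same insns.  */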
4397 if (flag_dump_final_insns)
4398 {
4399 final_output = fopen (flag_dump_final_insns, "a");
4400 if (!final_output)
4401 {
4402 error ("could not open final insn dump file %qs: %m",
4403 flag_dump_final_insns);
4404 flag_dump_final_insns = NULL;
4405 }
4406 else
4407 {
4408 flag_dump_noaddr = flag_dump_unnumbered = 1;
4409 if (flag_compare_debug_opt || flag_compare_debug)
4410 dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG;
4411 dump_function_header (final_output, current_function_decl,
4412 dump_flags);
4413 final_insns_dump_p = true;
4415 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4416 if (LABEL_P (insn))
4417 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4418 else
4419 {
4420 if (NOTE_P (insn))
4421 set_block_for_insn (insn, NULL);
4422 INSN_UID (insn) = 0;
4423 }
4424 }
4425 }
4427 /* It is very important to decompose the RTL instruction chain here:
4428 debug information keeps pointing into CODE_LABEL insns inside the function
4429 body. If these remain pointing to the other insns, we end up preserving
4430 the whole RTL chain and the attached detailed debug info in memory. */
4431 for (insn = get_insns (); insn; insn = next)
4433 next = NEXT_INSN (insn);
4434 SET_NEXT_INSN (insn) = NULL;
4435 SET_PREV_INSN (insn) = NULL;
4437 rtx_insn *call_insn = insn;
4438 if (NONJUMP_INSN_P (call_insn)
4439 && GET_CODE (PATTERN (call_insn)) == SEQUENCE)
4440 {
4441 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
4442 call_insn = seq->insn (0);
4443 }
4444 if (CALL_P (call_insn))
4445 {
4446 rtx note
4447 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
4448 if (note)
4449 remove_note (call_insn, note);
4450 }
4452 if (final_output
4453 && (!NOTE_P (insn)
4454 || (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4455 && NOTE_KIND (insn) != NOTE_INSN_BEGIN_STMT
4456 && NOTE_KIND (insn) != NOTE_INSN_INLINE_ENTRY
4457 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4458 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4459 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4460 print_rtl_single (final_output, insn);
4461 }
4463 if (final_output)
4464 {
4465 flag_dump_noaddr = save_noaddr;
4466 flag_dump_unnumbered = save_unnumbered;
4467 final_insns_dump_p = false;
4469 if (fclose (final_output))
4470 {
4471 error ("could not close final insn dump file %qs: %m",
4472 flag_dump_final_insns);
4473 flag_dump_final_insns = NULL;
4474 }
4475 }
4477 flag_rerun_cse_after_global_opts = 0;
4478 reload_completed = 0;
4479 epilogue_completed = 0;
4480 #ifdef STACK_REGS
4481 regstack_completed = 0;
4482 #endif
4484 /* Clear out the insn_length contents now that they are no
4485 longer valid. */
4486 init_insn_lengths ();
4488 /* Show no temporary slots allocated. */
4489 init_temp_slots ();
4491 free_bb_for_insn ();
4493 if (cfun->gimple_df)
4494 delete_tree_ssa (cfun);
4496 /* We can reduce stack alignment at the call site only when we are sure that
4497 the function body just produced will actually be used in the final
4498 executable. */
4499 if (flag_ipa_stack_alignment
4500 && decl_binds_to_current_def_p (current_function_decl))
4501 {
4502 unsigned int pref = crtl->preferred_stack_boundary;
4503 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4504 pref = crtl->stack_alignment_needed;
4505 cgraph_node::rtl_info (current_function_decl)
4506 ->preferred_incoming_stack_boundary = pref;
4507 }
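/* Worked example: if this function's preferred_stack_boundary was lowered
   to 64 bits and its stack_alignment_needed does not exceed that, callers
   that bind to this definition are told 64 bits and can avoid extra stack
   re-alignment; a larger stack_alignment_needed raises the advertised
   boundary instead.  */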
4509 /* Make sure volatile mem refs aren't considered valid operands for
4510 arithmetic insns. We must call this here if this is a nested inline
4511 function, since the above code leaves us in the init_recog state,
4512 and the function context push/pop code does not save/restore volatile_ok.
4514 ??? Maybe it isn't necessary for expand_start_function to call this
4515 anymore if we do it here? */
4517 init_recog_no_volatile ();
4519 /* We're done with this function. Free up memory if we can. */
4520 free_after_parsing (cfun);
4521 free_after_compilation (cfun);
4522 return 0;
4523 }
4525 namespace {
4527 const pass_data pass_data_clean_state =
4528 {
4529 RTL_PASS, /* type */
4530 "*clean_state", /* name */
4531 OPTGROUP_NONE, /* optinfo_flags */
4532 TV_FINAL, /* tv_id */
4533 0, /* properties_required */
4534 0, /* properties_provided */
4535 PROP_rtl, /* properties_destroyed */
4536 0, /* todo_flags_start */
4537 0, /* todo_flags_finish */
4538 };
4540 class pass_clean_state : public rtl_opt_pass
4541 {
4542 public:
4543 pass_clean_state (gcc::context *ctxt)
4544 : rtl_opt_pass (pass_data_clean_state, ctxt)
4545 {}
4547 /* opt_pass methods: */
4548 unsigned int execute (function *) final override
4549 {
4550 return rest_of_clean_state ();
4551 }
4553 }; // class pass_clean_state
4555 } // anon namespace
4557 rtl_opt_pass *
4558 make_pass_clean_state (gcc::context *ctxt)
4559 {
4560 return new pass_clean_state (ctxt);
4561 }
4563 /* Return true if INSN is a call to the current function. */
4565 static bool
4566 self_recursive_call_p (rtx_insn *insn)
4567 {
4568 tree fndecl = get_call_fndecl (insn);
4569 return (fndecl == current_function_decl
4570 && decl_binds_to_current_def_p (fndecl));
4571 }
4573 /* Collect hard register usage for the current function. */
4575 static void
4576 collect_fn_hard_reg_usage (void)
4577 {
4578 rtx_insn *insn;
4579 #ifdef STACK_REGS
4580 int i;
4581 #endif
4582 struct cgraph_rtl_info *node;
4583 HARD_REG_SET function_used_regs;
4585 /* ??? To be removed when all the ports have been fixed. */
4586 if (!targetm.call_fusage_contains_non_callee_clobbers)
4587 return;
4589 /* Be conservative - mark fixed and global registers as used. */
4590 function_used_regs = fixed_reg_set;
4592 #ifdef STACK_REGS
4593 /* Handle STACK_REGS conservatively, since the df-framework does not
4594 provide accurate information for them. */
4596 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4597 SET_HARD_REG_BIT (function_used_regs, i);
4598 #endif
4600 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4601 {
4602 HARD_REG_SET insn_used_regs;
4604 if (!NONDEBUG_INSN_P (insn))
4605 continue;
4607 if (CALL_P (insn)
4608 && !self_recursive_call_p (insn))
4609 function_used_regs
4610 |= insn_callee_abi (insn).full_and_partial_reg_clobbers ();
4612 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4613 function_used_regs |= insn_used_regs;
4615 if (hard_reg_set_subset_p (crtl->abi->full_and_partial_reg_clobbers (),
4616 function_used_regs))
4617 return;
4618 }
4620 /* Mask out fully-saved registers, so that they don't affect equality
4621 comparisons between function_abis. */
4622 function_used_regs &= crtl->abi->full_and_partial_reg_clobbers ();
4624 node = cgraph_node::rtl_info (current_function_decl);
4625 gcc_assert (node != NULL);
4627 node->function_used_regs = function_used_regs;
4628 }
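/* With -fipa-ra, callers compiled later in this translation unit can use
   the clobber set recorded here (in the callee's cgraph RTL info) instead
   of assuming the call clobbers every call-clobbered register of the ABI.  */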