/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
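/* Illustrative sketch: a front end expanding `if (cond) stmt;' would
   drive this file roughly as follows.  The parser hook shown is
   hypothetical; `expand_start_cond' and `expand_end_cond' are the
   entry points defined below.  */
#if 0
void
example_expand_if (cond, parse_then_clause)
     tree cond;
     void (*parse_then_clause) ();	/* hypothetical parser callback */
{
  expand_start_cond (cond, 0);	/* emit test; jump taken if COND is false */
  (*parse_then_clause) ();	/* front end expands the then-clause */
  expand_end_cond ();		/* emit the label the false-jump targets */
}
#endif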
#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

extern int xmalloc ();
extern void free ();
#define MAX(x,y) (((x) > (y)) ? (x) : (y))
#define MIN(x,y) (((x) < (y)) ? (x) : (y))
/* Nonzero if function being compiled pops its args on return.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;
/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.

   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* Name of function now being compiled.  */

char *current_function_name;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */

rtx stack_slot_list;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */

char *emit_filename;
int emit_lineno;
/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

/* The FUNCTION_DECL node for the function being compiled.  */
static tree this_function;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
static int frame_offset;

/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
static rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
static rtx tail_recursion_reentry;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Chain of all RTL_EXPRs that have insns in them.  */
static tree rtl_expr_chain;

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node satisfies that
   need for all binding levels.  */
static tree empty_cleanup_list;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.  */

struct case_node
{
  struct case_node *left;
  struct case_node *right;
  struct case_node *parent;
  tree low;
  tree high;
  tree test_label;
  tree code_label;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

extern void balance_case_nodes ();
extern void emit_case_nodes ();
extern void group_case_nodes ();
extern void emit_jump_if_reachable ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
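/* Illustrative sketch: expanding `while (c) { if (d) break; }' pushes
   and pops the stacks declared below in this order.  C and D stand for
   the parsed condition trees.  */
#if 0
  expand_start_loop (1);		/* pushes loop_stack and nesting_stack */
  expand_exit_loop_if_false (c);	/* the while-test */
  expand_start_cond (d, 0);		/* pushes cond_stack and nesting_stack */
  expand_exit_loop ();			/* one way `break' can be emitted */
  expand_end_cond ();			/* pops cond_stack via POPSTACK */
  expand_end_loop ();			/* pops loop_stack via POPSTACK */
#endif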
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label on the else-part, if any, else 0.  */
	  rtx else_label;
	  /* Label at the end of the whole construct.  */
	  rtx after_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first, once there are
	     any cleanups at all.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;
	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
static void fixup_gotos ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void fixup_stack_slots ();
static rtx fixup_stack_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static rtx fixup_var_refs_1 ();
static rtx parm_stack_loc ();
static void optimize_bit_field ();
static void do_jump_if_equal ();
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx				/* @@ non-static for case.c.  */
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Handle goto statements and the labels that they can go to.  */

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within,
     once there are any cleanups at all.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
/* Specify the location in the RTL code of a label BODY,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (body)
     tree body;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (body));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = body;
    }
}
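/* Illustrative sketch: a user label pairs `expand_goto' with
   `expand_label'; LABEL_DECL here stands for the label's tree node.  */
#if 0
  expand_goto (label_decl);	/* forward goto: may queue a goto_fixup */
  /* ... intervening statements, possibly ending binding contours ... */
  expand_label (label_decl);	/* defines the label; fixup_gotos later
				   finalizes any pending fixups for it */
#endif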
/* Generate RTL code for a `goto' statement with target label BODY.
   BODY should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (body)
     tree body;
{
  expand_goto_internal (body, label_rtx (body), 0);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 0);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level != 0)
	emit_move_insn (stack_pointer_rtx, stack_level);

      if (body != 0 && TREE_PACKED (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Return truth-value of whether there are any cleanups from
   the current binding contour to the end of the current function's
   binding contours.  */

int
any_pending_cleanups ()
{
  struct nesting *block;

  if (block_stack->data.block.cleanups == 0
      || block_stack->data.block.outer_cleanups == empty_cleanup_list)
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.else_label
	  || rtl_label == cond_stack->data.cond.after_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  if (end_block)
    {
      /* Now set END_BLOCK to the binding level to which we will return.  */

      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
	     && block->data.block.outer_cleanups != empty_cleanup_list)
	    || block->data.block.cleanups)
	   ? tree_cons (0, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != end_block;
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! TREE_ADDRESSABLE (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      TREE_ADDRESSABLE (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_insn_after (gen_move_insn (stack_pointer_rtx, f->stack_level),
			     f->before_jump);
	  f->before_jump = 0;
	}
      /* Label has still not appeared.  If we are exiting a block with
	 a stack level to restore, that started before the fixup,
	 mark this stack level as needing restoration
	 when the fixup is later finalized.
	 Also mark the cleanup_list_list element for F
	 that corresponds to this block, so that ultimately
	 this block's cleanups will be executed by the code above.  */
      /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	 it means the label is undefined.  That's erroneous, but possible.  */
      else if (thisblock != 0
	       && (thisblock->data.block.block_start_count
		   < f->block_start_count))
	{
	  tree lists = f->cleanup_list_list;
	  for (; lists; lists = TREE_CHAIN (lists))
	    /* If the following elt. corresponds to our containing block
	       then the elt. must be for this block.  */
	    if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	      TREE_ADDRESSABLE (lists) = 1;

	  if (stack_level)
	    f->stack_level = stack_level;
	}
    }
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text.  */

void
expand_asm (body)
     tree body;
{
  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers = list_length (clobbers);
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      int j;
      int found_equal = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  rtx reg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (val)));
	  /* `build' isn't safe; it really expects args to be trees.  */
	  tree t = build_nt (SAVE_EXPR, val, reg);

	  save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, reg, save_expr_regs);
	  TREE_VALUE (tail) = t;
	  TREE_TYPE (t) = TREE_TYPE (val);
	}
      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)
	{
	  int j;
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  extern char *reg_names[];

	  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	    if (!strcmp (regname, reg_names[j]))
	      break;
	  if (j == FIRST_PSEUDO_REGISTER)
	    {
	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }
}
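/* Illustrative sketch: for `asm volatile ("clrl %0" : "=r" (x));' a
   C front end would call the function above roughly like this; X_EXPR
   stands for the tree for `x'.  */
#if 0
  expand_asm_operands (build_string (7, "clrl %0"),
		       tree_cons (build_string (2, "=r"), x_expr, NULL_TREE),
		       NULL_TREE, NULL_TREE, 1, input_filename, lineno);
#endif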
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for statements explicitly casted to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_VOLATILE (exp)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node)
	  && (extra_warnings || warn_unused))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   expr_stmts_for_value ? 0 : const0_rtx,
				   VOIDmode, 0);
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WRAPPER_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

      /* These kinds of exprs are really stmts.  What to do?  */
    case LOOP_STMT:
    case IF_STMT:
      return 0;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Assignment to a cast results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Fall through.  */

    default:
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  rtx save = start_sequence ();
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  RTL_EXPR_RTL (t) = save;

  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  rtx saved = RTL_EXPR_RTL (t);

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence (saved);

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_VOLATILE (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
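/* Illustrative sketch: the two statement-expression entry points above
   pair up like this for `({ stmt; val; })'.  */
#if 0
  tree t = expand_start_stmt_expr ();	/* start a saved insn sequence */
  /* ... expand_expr_stmt for each substatement, `val' last ... */
  t = expand_end_stmt_expr (t);		/* T now carries type, value and insns */
#endif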
/* Return nonzero if within a try block at dynamic nesting level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && n->data.except_stmt.after_label != 0)
    n = n->next;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Return nonzero if within a catch clause at dynamic nesting level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && n->data.except_stmt.after_label == 0)
    n = n->next;
  while (n && level--)
    n = n->next;
  return n != 0;
}

/* Return nonzero if within an exception contour at dynamic level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}
/* Record that exception EX is raised within the current contour.  */

void
expand_raise (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
}
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler
    = (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting));

  /* Make an entry on cond_stack for the cond we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
}
/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
}
/* Begin the catch clauses of the current exception contour.
   EXITFLAG nonzero makes the clauses visible to `exit_something';
   ESCAPEFLAG nonzero makes them visible to `expand_escape_except'.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  struct nesting *n;

  if (exitflag)
    {
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }

  if (escapeflag)
    {
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}
/* Jump to the innermost enclosing escape label, if any.  */

void
expand_escape_except ()
{
  struct nesting *n;

  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
	return;
      }
}
/* End the current exception contour.  Exceptions raised in it but not
   handled by its catch clauses are propagated to the enclosing
   exception contour, if any.  */

void
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      tree this_raise;
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev != 0)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    break;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    raised = n->data.except_stmt.raised;
	  else
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
}
/* Record that the current catch clauses handle exception EX.  */

void
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
}
/* Record that the current catch clauses handle all ("default")
   exceptions.  */

void
expand_catch_default ()
{
  if (except_stack == 0)
    return;
  except_stack->data.except_stmt.handled = void_type_node;
}
/* End the current catch clause: jump past the remaining clauses
   to the end of the construct.  */

void
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return;
  expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
}
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond
    = (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting));

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.after_label = 0;
  thiscond->data.cond.else_label = gen_label_rtx ();
  thiscond->exit_label = exitflag ? thiscond->data.cond.else_label : 0;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.else_label, NULL);
}
/* Generate RTL for the end of an if-then with no else-clause.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  emit_label (thiscond->data.cond.else_label);

  POPSTACK (cond_stack);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  cond_stack->data.cond.after_label = gen_label_rtx ();
  if (cond_stack->exit_label != 0)
    cond_stack->exit_label = cond_stack->data.cond.after_label;
  emit_jump (cond_stack->data.cond.after_label);
  if (cond_stack->data.cond.else_label)
    emit_label (cond_stack->data.cond.else_label);
}
/* Generate RTL for the end of an if-then-else.
   Pop the record for it off of cond_stack.  */

void
expand_end_else ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  /* Note: a syntax error can cause this to be called
     without first calling `expand_start_else'.  */
  if (thiscond->data.cond.after_label)
    emit_label (thiscond->data.cond.after_label);

  POPSTACK (cond_stack);
}
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

void
expand_start_loop (exit_flag)
     int exit_flag;
{
  register struct nesting *thisloop
    = (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting));

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();
  emit_queue ();
  emit_note (0, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);
}
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

void
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
}
/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  do_pending_stack_adjust ();
  emit_note (0, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}
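/* Illustrative sketch: a C `for (; c; inc) body' uses the
   continue-elsewhere variant so that `continue' jumps to the stepper.  */
#if 0
  expand_start_loop_continue_elsewhere (1);
  expand_exit_loop_if_false (c);	/* loop test at the top */
  /* ... expand BODY; `continue' reaches the label emitted below ... */
  expand_loop_continue_here ();		/* the continue point */
  expand_expr_stmt (inc);		/* the stepper */
  expand_end_loop ();			/* jump back; emit the exit label */
#endif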
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  register rtx insn = get_last_insn ();
  register rtx start_label = loop_stack->data.loop.start_label;

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.  */
  if (optimize
      &&
      ! (GET_CODE (insn) == JUMP_INSN
	 && GET_CODE (PATTERN (insn)) == SET
	 && SET_DEST (PATTERN (insn)) == pc_rtx
	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = loop_stack->data.loop.start_label; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	    && SET_DEST (PATTERN (insn)) == pc_rtx
	    && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	    &&
	    ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
	      && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
		  == loop_stack->data.loop.end_label))
	     ||
	     (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
	      && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
		  == loop_stack->data.loop.end_label))))
	  break;

      if (insn != 0)
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();

	  emit_label_after (newstart_label, PREV_INSN (start_label));
	  reorder_insns (start_label, insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label), PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  emit_jump (start_label);
  emit_note (0, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);
}
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_continue_loop ()
{
  if (loop_stack == 0)
    return 0;
  expand_goto_internal (0, loop_stack->data.loop.continue_label, 0);
  return 1;
}
/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop ()
{
  if (loop_stack == 0)
    return 0;
  expand_goto_internal (0, loop_stack->data.loop.end_label, 0);
  return 1;
}
/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop_if_false (cond)
     tree cond;
{
  if (loop_stack == 0)
    return 0;
  do_jump (cond, loop_stack->data.loop.end_label, NULL);
  return 1;
}
/* Return non-zero if currently inside a loop.  */

int
inside_loop ()
{
  return loop_stack != 0;
}
/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_something ()
{
  struct nesting *n;

  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      {
	expand_goto_internal (0, n->exit_label, 0);
	return 1;
      }

  return 0;
}
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return ()
{
  struct nesting *block = block_stack;
  rtx last_insn = 0;

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

static void
expand_null_return_1 (last_insn, use_goto)
     rtx last_insn;
     int use_goto;
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)
    {
      if (end_label == 0)
	end_label = return_label = gen_label_rtx ();
      expand_goto_internal (0, end_label, last_insn);
      return;
    }

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
#ifdef HAVE_return
  if (HAVE_return && cleanup_label == 0)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
      return;
    }
#endif

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (0, end_label, last_insn);
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (retval)
     tree retval;
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  register rtx val = 0;
  register rtx op0;
  tree retval_rhs;
  int cleanups;
  struct nesting *block;

  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
  cleanups = 0;
  for (block = block_stack; block; block = block->next)
    if (block->data.block.cleanups != 0)
      {
	cleanups = 1;
	break;
      }

  if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (TREE_TYPE (retval) == void_type_node)
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  /* Only use `last_insn' if there are cleanups which must be run.  */
  if (cleanups || cleanup_label != 0)
    last_insn = get_last_insn ();

  /* For tail-recursive call to current function,
     just jump back to the beginning.
     It's unsafe if any auto variable in this function
     has its address taken; for simplicity,
     require stack frame to be empty.  */
  if (optimize && retval_rhs != 0
      && frame_offset == STARTING_FRAME_OFFSET
      && TREE_CODE (retval_rhs) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
      && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == this_function
      /* Finish checking validity, and if valid emit code
	 to set the argument variables for the new call.  */
      && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
			      DECL_ARGUMENTS (this_function)))
    {
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      expand_goto_internal (0, tail_recursion_label, last_insn);
      emit_barrier ();
      return;
    }

#ifdef HAVE_return
  /* This optimization is safe if there are local cleanups
     because expand_null_return takes care of them.
     ??? I think it should also be safe when there is a cleanup label,
     because expand_null_return takes care of them, too.
     Any reason why not?  */
  if (HAVE_return && cleanup_label == 0
      && ! current_function_returns_pcc_struct)
    {
      /* If this is  return x == y;  then generate
	 if (x == y) return 1; else return 0;
	 if we can do it with explicit return insns.  */
      if (retval_rhs)
	switch (TREE_CODE (retval_rhs))
	  {
	  case EQ_EXPR:
	  case NE_EXPR:
	  case GT_EXPR:
	  case GE_EXPR:
	  case LT_EXPR:
	  case LE_EXPR:
	  case TRUTH_ANDIF_EXPR:
	  case TRUTH_ORIF_EXPR:
	  case TRUTH_AND_EXPR:
	  case TRUTH_OR_EXPR:
	  case TRUTH_NOT_EXPR:
	    op0 = gen_label_rtx ();
	    val = DECL_RTL (DECL_RESULT (this_function));
	    jumpifnot (retval_rhs, op0);
	    emit_move_insn (val, const1_rtx);
	    emit_insn (gen_rtx (USE, VOIDmode, val));
	    expand_null_return ();
	    emit_label (op0);
	    emit_move_insn (val, const0_rtx);
	    emit_insn (gen_rtx (USE, VOIDmode, val));
	    expand_null_return ();
	    return;
	  }
    }
#endif /* HAVE_return */

  if (cleanups
      && retval_rhs != 0
      && TREE_TYPE (retval_rhs) != void_type_node
      && GET_CODE (DECL_RTL (DECL_RESULT (this_function))) == REG)
    {
      /* Calculate the return value into a pseudo reg.  */
      val = expand_expr (retval_rhs, 0, VOIDmode, 0);
      emit_queue ();
      /* Put the cleanups here.  */
      last_insn = get_last_insn ();
      /* Copy the value into hard return reg.  */
      emit_move_insn (DECL_RTL (DECL_RESULT (this_function)), val);
      val = DECL_RTL (DECL_RESULT (this_function));

      if (GET_CODE (val) == REG)
	emit_insn (gen_rtx (USE, VOIDmode, val));
      expand_null_return_1 (last_insn, cleanups);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg
	 and let cleanups come after.  */
      val = expand_expr (retval, 0, VOIDmode, 0);
      emit_queue ();

      val = DECL_RTL (DECL_RESULT (this_function));
      if (val && GET_CODE (val) == REG)
	emit_insn (gen_rtx (USE, VOIDmode, val));
      expand_null_return ();
    }
}
/* Return 1 if the end of the generated RTX is not a barrier.
   This means code already compiled can drop through.  */

int
drop_through_at_end_p ()
{
  rtx insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);
  return insn && GET_CODE (insn) != BARRIER;
}
/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */

static int
tail_recursion_args (actuals, formals)
     tree actuals, formals;
{
  register tree a = actuals, f = formals;
  register int i;
  register rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0;
       a && f;
       a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
	return 0;
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      int copy = 0;
      register int j;
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  { copy = 1; break; }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (DECL_MODE (f) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	convert_move (DECL_RTL (f), argvec[i],
		      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
    }

  return 1;
}
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   EXIT_FLAG is nonzero if this construct should be visible to
   `exit_something'.  */

void
expand_start_bindings (exit_flag)
     int exit_flag;
{
  struct nesting *thisblock
    = (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting));

  rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  /* We build this even if the cleanups lists are empty
     because we rely on having an element in the chain
     for each block that is pending.  */
  if (block_stack)
    {
      if (block_stack->data.block.cleanups == NULL_TREE
	  && (block_stack->data.block.outer_cleanups == NULL_TREE
	      || block_stack->data.block.outer_cleanups == empty_cleanup_list))
	thisblock->data.block.outer_cleanups = empty_cleanup_list;
      else
	thisblock->data.block.outer_cleanups
	  = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		       block_stack->data.block.outer_cleanups);
    }
  else
    thisblock->data.block.outer_cleanups = 0;
  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;
}
/* Output a USE for any register use in RTL.
   This is used with -noreg to mark the extent of lifespan
   of any registers used in a user-visible variable's DECL_RTL.  */

void
use_variable (rtl)
     rtx rtl;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn (gen_rtx (USE, VOIDmode, rtl));
  else if (GET_CODE (rtl) == MEM
	   && GET_CODE (XEXP (rtl, 0)) == REG
	   && XEXP (rtl, 0) != frame_pointer_rtx
	   && XEXP (rtl, 0) != arg_pointer_rtx)
    /* This is a variable-sized structure.  */
    emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
}

/* Like use_variable except that it outputs the USEs after INSN
   instead of at the end of the insn-chain.  */

void
use_variable_after (rtl, insn)
     rtx rtl, insn;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
  else if (GET_CODE (rtl) == MEM
	   && GET_CODE (XEXP (rtl, 0)) == REG
	   && XEXP (rtl, 0) != frame_pointer_rtx
	   && XEXP (rtl, 0) != arg_pointer_rtx)
    /* This is a variable-sized structure.  */
    emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
}

/* Generate RTL code to terminate a binding contour.
   VARS is the chain of VAR_DECL nodes
   for the variables bound in this contour.
   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

void
expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  register struct nesting *thisblock = block_stack;
  register tree decl;

  for (decl = vars; decl; decl = TREE_CHAIN (decl))
    if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
      warning_with_decl (decl, "unused variable `%s'");

  /* Mark the beginning and end of the scope if requested.  */

  if (mark_ends)
    emit_note (0, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  if (dont_jump_in
      || thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  TREE_PACKED (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels and all cleanups.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Perform any cleanups associated with the block.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;

      /* Don't let cleanups affect ({...}) constructs.  */
      expr_stmts_for_value = 0;
      expand_cleanups (thisblock->data.block.cleanups, 0);
      do_pending_stack_adjust ();
      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (thisblock->data.block.stack_level != 0)
	emit_move_insn (stack_pointer_rtx,
			thisblock->data.block.stack_level);

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

  if (obey_regdecls)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      {
	rtx rtl = DECL_RTL (decl);
	if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
	  use_variable (rtl);
      }

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);
}

/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   There is no special support here for C++ constructors.
   They should be handled by the proper code in DECL_INITIAL.  */

void
expand_decl (decl, cleanup)
     register tree decl;
     tree cleanup;
{
  struct nesting *thisblock = block_stack;
  tree type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_local (DECL_MODE (decl), 0);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into registers.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (TREE_REGDECL (decl) || ! obey_regdecls))
    {
      /* Automatic variable that can go in a register.  */
      DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
      if (TREE_CODE (type) == POINTER_TYPE)
	mark_reg_pointer (DECL_RTL (decl));
      REG_USERVAR_P (DECL_RTL (decl)) = 1;
    }
  else if (TREE_LITERAL (DECL_SIZE (decl)))
    {
      rtx oldaddr = 0;
      rtx addr;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Variable of fixed size that goes on the stack.  */
      DECL_RTL (decl)
	= assign_stack_local (DECL_MODE (decl),
			      (TREE_INT_CST_LOW (DECL_SIZE (decl))
			       * DECL_SIZE_UNIT (decl)
			       + BITS_PER_UNIT - 1)
			      / BITS_PER_UNIT);
      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  emit_move_insn (oldaddr, addr);
	}

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
	= (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      frame_pointer_needed = 1;

      /* Record the stack pointer on entry to block, if we have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	{
	  do_pending_stack_adjust ();
	  thisblock->data.block.stack_level
	    = copy_to_reg (stack_pointer_rtx);
	  stack_block_stack = thisblock;
	}

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (convert_units (DECL_SIZE (decl),
					 DECL_SIZE_UNIT (decl),
					 BITS_PER_UNIT),
			  0, VOIDmode, 0);

      /* Round it up to this machine's required stack boundary.  */
#ifdef STACK_BOUNDARY
      /* Avoid extra code if we can prove it's a multiple already.  */
      if (DECL_SIZE_UNIT (decl) % STACK_BOUNDARY)
	{
#ifdef STACK_POINTER_OFFSET
	  /* Avoid extra code if we can prove that adding STACK_POINTER_OFFSET
	     will not give this address invalid alignment.  */
	  if (DECL_ALIGN (decl) > ((STACK_POINTER_OFFSET * BITS_PER_UNIT) % STACK_BOUNDARY))
	    size = plus_constant (size,
				  STACK_POINTER_OFFSET % (STACK_BOUNDARY / BITS_PER_UNIT));
#endif
	  size = round_push (size);
	}
#endif /* STACK_BOUNDARY */

      /* Make space on the stack, and get an rtx for the address of it.  */
#ifdef STACK_GROWS_DOWNWARD
      anti_adjust_stack (size);
#endif
      address = copy_to_reg (stack_pointer_rtx);
#ifdef STACK_POINTER_OFFSET
      {
	/* If the contents of the stack pointer reg are offset from the
	   actual top-of-stack address, add the offset here.  */
	rtx sp_offset = gen_rtx (CONST_INT, VOIDmode, STACK_POINTER_OFFSET);
#ifdef STACK_BOUNDARY
#ifdef STACK_GROWS_DOWNWARD
#else /* not STACK_GROWS_DOWNWARD */
#endif /* not STACK_GROWS_DOWNWARD */
	if (DECL_ALIGN (decl) > ((STACK_POINTER_OFFSET * BITS_PER_UNIT) % STACK_BOUNDARY))
	  sp_offset = plus_constant (sp_offset,
				     (STACK_POINTER_OFFSET
				      % (STACK_BOUNDARY / BITS_PER_UNIT)));
#endif /* STACK_BOUNDARY */
	emit_insn (gen_add2_insn (address, sp_offset));
      }
#endif /* STACK_POINTER_OFFSET */
#ifndef STACK_GROWS_DOWNWARD
      anti_adjust_stack (size);
#endif

      /* Some systems require a particular insn to refer to the stack
	 to make the pages exist.  */
#ifdef HAVE_probe
      if (HAVE_probe)
	emit_insn (gen_probe ());
#endif

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
    }

  if (TREE_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
}

/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
    }
}

/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   Return 0 if such an expansion is invalid.  Otherwise, return 1.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock = block_stack;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      if (thisblock == 0)
	return 0;
      thisblock->data.block.cleanups
	= temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
      /* If this block has a cleanup, it belongs in stack_block_stack.  */
      stack_block_stack = thisblock;
    }
  return 1;
}

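/* Illustrative sketch (hypothetical): a C++ front end declaring a local
   with a destructor would register the cleanup right after creating the
   RTL for the variable.  `build_destructor_call' is an assumed front-end
   helper, not part of GNU CC.  */
#if 0
  expand_decl (decl, NULL_TREE);
  if (! expand_decl_cleanup (decl, build_destructor_call (decl)))
    error ("destructor needed but no binding contour is active");
#endif
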
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = block_stack;
  rtx x;

  expand_decl (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode tmode = TYPE_MODE (TREE_TYPE (decl_elt));

      if (GET_CODE (x) == MEM)
	{
	  /* @@ calling `change_address' means that we cannot
	     be at top-level, since `memory_address' might try
	     to kick this address into a register, which won't
	     work.  Will this work?  */
	  rtx new = gen_rtx (MEM, tmode, XEXP (x, 0));
	  DECL_RTL (decl_elt) = new;
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	}
      else
	DECL_RTL (decl_elt) = gen_rtx (SUBREG, tmode, x, 0);

      /* Record the cleanup if there is one.  */
      if (cleanup_elt != 0)
	thisblock->data.block.cleanups
	  = temp_tree_cons (decl_elt, cleanup_elt,
			    thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);
    }
}

/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.  */

static void
expand_cleanups (list, dont_do)
     tree list;
     tree dont_do;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do);
	else
	  expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
      }
}

/* Expand a list of cleanups for a goto fixup.
   The expansion is put into the insn chain after the insn *BEFORE_JUMP
   and *BEFORE_JUMP is set to the insn that now comes before the jump.  */

static void
fixup_cleanups (list, before_jump)
     tree list;
     rtx *before_jump;
{
  rtx beyond_jump = get_last_insn ();
  rtx new_before_jump;

  expand_cleanups (list, 0);
  do_pending_stack_adjust ();

  new_before_jump = get_last_insn ();

  if (beyond_jump != new_before_jump)
    /* If cleanups expand to nothing, don't reorder.  */
    reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);

  *before_jump = new_before_jump;
}

/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}

int
this_contour_has_cleanups_p ()
{
  return block_stack && block_stack->data.block.cleanups != 0;
}

/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type)
     int exit_flag;
     tree expr;
     tree type;
{
  register struct nesting *thiscase
    = (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting));

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  emit_note (0, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
}

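/* Illustrative sketch (not part of this file): expanding
   `switch (x) { case 1: ...; default: ...; }'.  `case_label_1' and
   `default_label_decl' stand for LABEL_DECLs made by the front end.  */
#if 0
  expand_start_case (1, x_expr, TREE_TYPE (x_expr));
  /* Each label arrives via pushcase as it is parsed.  */
  pushcase (build_int_2 (1, 0), case_label_1);
  /* ... statements ... */
  pushcase (NULL_TREE, default_label_decl);	/* the default label */
  /* ... statements ... */
  expand_end_case (x_expr);
#endif
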
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

void
expand_start_case_dummy ()
{
  register struct nesting *thiscase
    = (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting));

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
}

/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  POPSTACK (case_stack);
}

/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing.
   If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.

   Extended to handle range statements, should they ever
   become part of the language.  */

int
pushcase (value, label)
     register tree value;
     register tree label;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = convert (nominal_type, value);

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */

  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	return 2;
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
      /* Find the elt in the chain before which to insert the new value,
	 to keep the chain sorted in increasing order.
	 But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
	   /* Keep going past elements distinctly less than VALUE.  */
	   *l != 0 && tree_int_cst_lt ((*l)->high, value);
	   l = &(*l)->right)
	;
      if (*l)
	{
	  /* Element we will insert before must be distinctly greater;
	     overlap means error.  */
	  if (! tree_int_cst_lt (value, (*l)->low))
	    return 2;
	}

      /* Add this label to the chain, and succeed.
	 Copy VALUE so it is on temporary rather than momentary
	 obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);
  return 0;
}

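/* Sketch of how a front end might report pushcase's error codes
   (illustrative; the real messages live in the language front ends).  */
#if 0
  switch (pushcase (value, label))
    {
    case 1: error ("case label not within a switch statement"); break;
    case 2: error ("duplicate case value"); break;
    case 3: error ("case value out of range"); break;
    }
#endif
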
/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.

   Note that this does not currently work, since expand_end_case
   has yet to be extended to handle RANGE_EXPRs.  */

int
pushcase_range (value1, value2, label)
     register tree value1, value2;
     register tree label;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 != 0)
    value1 = convert (nominal_type, value1);
  if (value2 != 0)
    value2 = convert (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (value1 != 0 && ! int_fits_type_p (value1, index_type))
    return 3;
  if (value2 != 0 && ! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, label);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
	return 2;
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}

/* Check that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.  */

static void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register tree chain;

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of enumerals in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerals not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;

      if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain))))
	warning ("enumerated value `%s' not handled in switch",
		 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerals.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;

      if (!chain)
	warning ("case value `%d' not in enumerated type `%s'",
		 TREE_INT_CST_LOW (n->low),
		 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));
    }
}

/* Terminate a case (Pascal) or switch (C) statement
   in which CASE_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label = gen_label_rtx ();
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr = thiscase->data.case_stmt.index_expr;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (TREE_TYPE (index_expr), n->low);
	  n->high = convert (TREE_TYPE (index_expr), n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */

	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      /* Compute span of values.  */
      if (count != 0)
	range = combine (MINUS_EXPR, maxval, minval);

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_jump (default_label);
	}
      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */
      else if (TREE_INT_CST_HIGH (range) != 0
	       /* If machine does not have a case insn that compares the
		  bounds, this means extra overhead for dispatch tables
		  which raises the threshold for using them.  */
	       || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
	       || TREE_CODE (index_expr) == INTEGER_CST)
	{
	  index = expand_expr (index_expr, 0, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if ((GET_MODE (index) == QImode || GET_MODE (index) == HImode)
	      && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
		  == CODE_FOR_nothing))
	    index = convert_to_mode (SImode, index, unsignedp);

	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (TREE_TYPE (index_expr), index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimisation phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */
	      if (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE)
		estimate_case_costs (thiscase->data.case_stmt.case_list,
				     default_label);
	      balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, unsignedp);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
#ifdef HAVE_casesi
	  /* Convert the index to SImode.  */
	  if (TYPE_MODE (TREE_TYPE (index_expr)) == DImode)
	    {
	      index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
				  index_expr, minval);
	      minval = integer_zero_node;
	    }
	  if (TYPE_MODE (TREE_TYPE (index_expr)) != SImode)
	    index_expr = convert (type_for_size (GET_MODE_BITSIZE (SImode), 0),
				  index_expr);
	  index = expand_expr (index_expr, 0, VOIDmode, 0);
	  index = protect_from_queue (index, 0);
	  do_pending_stack_adjust ();

	  emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
				      expand_expr (range, 0, VOIDmode, 0),
				      table_label, default_label));
#else /* not HAVE_casesi */
#ifdef HAVE_tablejump
	  index_expr = convert (type_for_size (GET_MODE_BITSIZE (SImode), 0),
				build (MINUS_EXPR, TREE_TYPE (index_expr),
				       index_expr, minval));
	  index = expand_expr (index_expr, 0, VOIDmode, 0);
	  index = protect_from_queue (index, 0);
	  do_pending_stack_adjust ();

	  do_tablejump (index,
			gen_rtx (CONST_INT, VOIDmode, TREE_INT_CST_LOW (range)),
			table_label, default_label);
#endif /* not HAVE_tablejump */
#endif /* not HAVE_casesi */

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero (labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register int i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);

	      while (i + TREE_INT_CST_LOW (minval)
		     <= TREE_INT_CST_LOW (n->high))
		labelvec[i++]
		  = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

	  /* Output the table */
	  emit_label (table_label);

#ifdef CASE_VECTOR_PC_RELATIVE
	  emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				   gen_rtx (LABEL_REF, Pmode, table_label),
				   gen_rtvec_v (ncases, labelvec)));
#else
	  emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				   gen_rtvec_v (ncases, labelvec)));
#endif

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      reorder_insns (NEXT_INSN (before_case), get_last_insn (),
		     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);
}

/* See case.c for CASE-handling code.  */

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset + STARTING_FRAME_OFFSET;
#else
  return frame_offset - STARTING_FRAME_OFFSET;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.  */

rtx
assign_stack_local (mode, size)
     enum machine_mode mode;
     int size;
{
  register rtx x, addr;
  int bigend_correction = 0;

  frame_pointer_needed = 1;

  /* Make each stack slot a multiple of the main allocation unit.  */
  size = (((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
	   / (BIGGEST_ALIGNMENT / BITS_PER_UNIT))
	  * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#ifdef BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif
  addr = gen_rtx (PLUS, Pmode, frame_pointer_rtx,
		  gen_rtx (CONST_INT, VOIDmode,
			   (frame_offset + bigend_correction)));
#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  if (! memory_address_p (mode, addr))
    invalid_stack_slot = 1;

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}

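/* Worked example (illustrative): with BIGGEST_ALIGNMENT of 32 bits and
   BITS_PER_UNIT of 8, the allocation unit is 4 bytes, so a request for
   SIZE = 6 is rounded to ((6 + 3) / 4) * 4 = 8 bytes.  The resulting rtx
   has the form
     (mem:MODE (plus (reg frame_pointer) (const_int OFFSET)))  */
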
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg = DECL_RTL (decl);
  register rtx new;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;
  if (GET_CODE (reg) != REG)
    return;

  new = parm_stack_loc (reg);
  if (new == 0)
    new = assign_stack_local (GET_MODE (reg), GET_MODE_SIZE (GET_MODE (reg)));

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  fixup_var_refs (reg);
}

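/* Illustrative sketch (hypothetical caller): a front end does this when it
   sees `&v' after `v' was optimistically given a register.  */
#if 0
  if (DECL_RTL (decl) != 0 && GET_CODE (DECL_RTL (decl)) == REG)
    put_var_into_stack (decl);	/* rewrites DECL_RTL and all prior insns */
  else
    TREE_ADDRESSABLE (decl) = 1;	/* no rtl yet; allocate in memory later */
#endif
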
static void
fixup_var_refs (var)
     rtx var;
{
  extern rtx sequence_stack;
  rtx stack = sequence_stack;
  tree pending;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, get_insns (), stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = XEXP (XEXP (stack, 1), 1))
    {
      push_to_sequence (XEXP (stack, 0));
      fixup_var_refs_insns (var, XEXP (stack, 0),
			    XEXP (XEXP (stack, 1), 1) == 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      XEXP (XEXP (stack, 1), 0) = get_last_insn ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_expr_chain; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, seq, 0);
	}
    }
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, insn, toplevel)
     rtx var;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      register rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	  || GET_CODE (insn) == JUMP_INSN)
	{
	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.  */
	  if (toplevel
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == var
	      && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
	    {
	      next = delete_insn (insn);
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    fixup_var_refs_1 (var, PATTERN (insn), insn);
	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
	}
      insn = next;
    }
}

static rtx
fixup_var_refs_1 (var, x, insn)
     register rtx var;
     register rtx x;
     rtx insn;
{
  register int i;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem;

  switch (code)
    {
    case MEM:
      if (x == var)
	{
	  x = fixup_stack_1 (x, insn);
	  tem = gen_reg_rtx (GET_MODE (x));
	  /* Put new insn before a CALL, before any USEs before it.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    while (PREV_INSN (insn) != 0 && GET_CODE (PREV_INSN (insn)) == INSN
		   && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
	      insn = PREV_INSN (insn);
	  emit_insn_before (gen_move_insn (tem, x), insn);
	  return tem;
	}
      break;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
    case SUBREG:
      tem = x;
      while (GET_CODE (tem) == SUBREG || GET_CODE (tem) == SIGN_EXTRACT
	     || GET_CODE (tem) == ZERO_EXTRACT)
	tem = XEXP (tem, 0);
      if (tem == var)
	{
	  x = fixup_stack_1 (x, insn);
	  tem = gen_reg_rtx (GET_MODE (x));
	  if (GET_CODE (x) == SUBREG)
	    x = fixup_memory_subreg (x, insn);
	  emit_insn_before (gen_move_insn (tem, x), insn);
	  return tem;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;
	rtx outersrc = src;

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);
	while (GET_CODE (src) == SUBREG
	       || GET_CODE (src) == SIGN_EXTRACT
	       || GET_CODE (src) == ZERO_EXTRACT)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* Clean up (SUBREG:SI (MEM:mode ...) 0)
	   that may appear inside a SIGN_EXTRACT or ZERO_EXTRACT.
	   This was legitimate when the MEM was a REG.  */

	if ((GET_CODE (outerdest) == SIGN_EXTRACT
	     || GET_CODE (outerdest) == ZERO_EXTRACT)
	    && GET_CODE (XEXP (outerdest, 0)) == SUBREG
	    && SUBREG_REG (XEXP (outerdest, 0)) == var)
	  XEXP (outerdest, 0) = fixup_memory_subreg (XEXP (outerdest, 0), insn);

	if ((GET_CODE (outersrc) == SIGN_EXTRACT
	     || GET_CODE (outersrc) == ZERO_EXTRACT)
	    && GET_CODE (XEXP (outersrc, 0)) == SUBREG
	    && SUBREG_REG (XEXP (outersrc, 0)) == var)
	  XEXP (outersrc, 0) = fixup_memory_subreg (XEXP (outersrc, 0), insn);

	/* Make sure that the machine's SIGN_EXTRACT and ZERO_EXTRACT insns
	   accept a memory operand.  */
#ifdef HAVE_extzv
	if (GET_CODE (outersrc) == ZERO_EXTRACT
	    && ! ((*insn_operand_predicate[(int) CODE_FOR_extzv][0])
		  (XEXP (outersrc, 0), VOIDmode)))
	  XEXP (outersrc, 0) = src
	    = fixup_var_refs_1 (var, XEXP (outersrc, 0), insn);
#endif
#ifdef HAVE_extv
	if (GET_CODE (outersrc) == SIGN_EXTRACT
	    && ! ((*insn_operand_predicate[(int) CODE_FOR_extv][0])
		  (XEXP (outersrc, 0), VOIDmode)))
	  XEXP (outersrc, 0) = src
	    = fixup_var_refs_1 (var, XEXP (outersrc, 0), insn);
#endif
#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT
	    && ! ((*insn_operand_predicate[(int) CODE_FOR_insv][0])
		  (XEXP (outerdest, 0), VOIDmode)))
	  {
	    rtx tem = gen_reg_rtx (GET_MODE (XEXP (outerdest, 0)));

	    emit_insn_before (gen_move_insn (tem, XEXP (outerdest, 0)), insn);
	    emit_insn_after (gen_move_insn (XEXP (outerdest, 0), tem), insn);
	    dest = XEXP (outerdest, 0) = tem;
	  }
#endif

	/* Make sure a MEM inside a SIGN_EXTRACT has QImode
	   since that's what bit-field insns want.  */

	if ((GET_CODE (outerdest) == SIGN_EXTRACT
	     || GET_CODE (outerdest) == ZERO_EXTRACT)
	    && GET_CODE (XEXP (outerdest, 0)) == MEM
	    && GET_MODE (XEXP (outerdest, 0)) != QImode)
	  {
	    XEXP (outerdest, 0) = copy_rtx (XEXP (outerdest, 0));
	    PUT_MODE (XEXP (outerdest, 0), QImode);
	    /* Adjust the address so the bit field starts within the byte
	       addressed.  This helps certain optimization patterns.  */
	    if (GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& offsettable_memref_p (XEXP (outerdest, 0)))
	      {
		int count = INTVAL (XEXP (outerdest, 2));
		XEXP (outerdest, 0)
		  = adj_offsettable_operand (XEXP (outerdest, 0),
					     count / GET_MODE_BITSIZE (QImode));
		XEXP (outerdest, 2)
		  = gen_rtx (CONST_INT, VOIDmode,
			     count % GET_MODE_BITSIZE (QImode));
	      }
	  }

	if ((GET_CODE (outersrc) == SIGN_EXTRACT
	     || GET_CODE (outersrc) == ZERO_EXTRACT)
	    && GET_CODE (XEXP (outersrc, 0)) == MEM
	    && GET_MODE (XEXP (outersrc, 0)) != QImode)
	  {
	    XEXP (outersrc, 0) = copy_rtx (XEXP (outersrc, 0));
	    PUT_MODE (XEXP (outersrc, 0), QImode);
	    /* Adjust the address so the bit field starts within the byte
	       addressed.  This helps certain optimization patterns.  */
	    if (GET_CODE (XEXP (outersrc, 2)) == CONST_INT
		&& offsettable_memref_p (XEXP (outersrc, 0)))
	      {
		int count = INTVAL (XEXP (outersrc, 2));
		XEXP (outersrc, 0)
		  = adj_offsettable_operand (XEXP (outersrc, 0),
					     count / GET_MODE_BITSIZE (QImode));
		XEXP (outersrc, 2)
		  = gen_rtx (CONST_INT, VOIDmode,
			     count % GET_MODE_BITSIZE (QImode));
	      }
	  }

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* An insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   But do fix up the address of VAR's stack slot if nec,
	   and fix up SUBREGs containing VAR
	   (since they are now memory subregs).  */

	if (GET_CODE (SET_SRC (x)) == REG || GET_CODE (SET_DEST (x)) == REG
	    || (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_SRC (x))) == REG)
	    || (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	  {
	    if (src == var && GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = fixup_memory_subreg (SET_SRC (x), insn);
	    if (dest == var && GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn);
	    return fixup_stack_1 (x, insn);
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest;
	    tem = SET_DEST (x);
	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (tem) == STRICT_LOW_PART)
	      tem = XEXP (tem, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (tem) == SUBREG)
	      tem = fixup_memory_subreg (tem, insn);
	    fixeddest = fixup_stack_1 (tem, insn);
	    temp = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_after (gen_move_insn (fixeddest, temp), insn);
	    SET_DEST (x) = temp;
	  }
	break;
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_var_refs_1 (var, XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = fixup_var_refs_1 (var, XVECEXP (x, i, j), insn);
	}
    }
  return x;
}

/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.  */

static rtx
fixup_memory_subreg (x, insn)
     rtx x;
     rtx insn;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx saved, result;

#ifdef BYTES_BIG_ENDIAN
  offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif
  addr = plus_constant (addr, offset);
  if (memory_address_p (mode, addr))
    return change_address (SUBREG_REG (x), mode, addr);
  saved = start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence (saved);
  return result;
}

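/* Worked example (illustrative): on a little-endian machine with 4-byte
   words, (subreg:SI (mem:DI (plus (reg fp) (const_int -16))) 1) names
   word 1 of the DImode memory, so offset = 1 * UNITS_PER_WORD = 4 and the
   result is (mem:SI (plus (reg fp) (const_int -12))).  */
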
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.  */

static rtx
walk_fixup_memory_subreg (x, insn)
     register rtx x;
     rtx insn;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}

/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	    || GET_CODE (insn) == JUMP_INSN)
	  fixup_stack_1 (PATTERN (insn), insn);
    }
}

/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
	  && XEXP (ad, 0) == frame_pointer_rtx
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp;
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;
	  temp = gen_reg_rtx (GET_MODE (ad));
	  emit_insn_before (gen_move_insn (temp, ad), insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}

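/* Worked example (illustrative): on a machine whose addressing mode allows
   only 16-bit displacements, a slot at frame offset -70000 yields
   (mem:SI (plus (reg fp) (const_int -70000))), which fails
   memory_address_p; fixup_stack_1 then loads the sum into a fresh pseudo
   before INSN and rewrites the ref as (mem:SI (reg NN)).  */
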
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from stmt.c, and EQUIV_MEM is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && (INTVAL (XEXP (bitfield, 1)) == GET_MODE_BITSIZE (QImode)
	  || INTVAL (XEXP (bitfield, 1)) == GET_MODE_BITSIZE (HImode))
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode and to
	 add something to the address.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && offsettable_address_p (0, GET_MODE (bitfield), XEXP (memref, 0)))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  register int offset
	    = INTVAL (XEXP (bitfield, 2)) / GET_MODE_BITSIZE (QImode);
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#ifdef BYTES_BIG_ENDIAN
	      offset -= (MIN (UNITS_PER_WORD,
			      GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			 - MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (memref))));
#endif
	    }

	  memref = gen_rtx (MEM,
			    (INTVAL (XEXP (bitfield, 1)) == GET_MODE_BITSIZE (QImode)
			     ? QImode : HImode),
			    XEXP (memref, 0));

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      SET_DEST (body)
		= adj_offsettable_operand (memref, offset);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  SET_SRC (body) = src;
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0)
		dest = SUBREG_REG (dest);
	      SET_DEST (body) = dest;

	      memref = adj_offsettable_operand (memref, offset);
	      if (GET_MODE (dest) == GET_MODE (memref))
		SET_SRC (body) = memref;
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx last = get_last_insn ();
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  /* Put the conversion before the insn being fixed.  */
		  reorder_insns (NEXT_INSN (last), get_last_insn (),
				 PREV_INSN (insn));
		  SET_SRC (body) = newreg;
		}
	    }

	  /* Cause the insn to be re-recognized.  */

	  INSN_CODE (insn) = -1;
	}
    }
}

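/* Worked example (illustrative): with 8-bit QImode, an insn such as
     (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 16)) SRC)
   names an aligned byte (an 8-bit field at bit 16), so it is rewritten as
     (set (mem:QI (plus ADDR (const_int 2))) SRC')
   and re-recognized as a plain byte move.  */
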
/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */

static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */

static rtx *parm_reg_stack_loc;

int
max_parm_reg_num ()
{
  return max_parm_reg;
}

/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn ()
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}

/* Get the stack home of a REG rtx that is one of this function's parameters.
   This is called rather than assign a new stack slot as a local.
   Return 0 if there is no existing stack home suitable for such use.  */

static rtx
parm_stack_loc (reg)
     rtx reg;
{
  if (REGNO (reg) < max_parm_reg)
    return parm_reg_stack_loc[REGNO (reg)];
  return 0;
}

/* Return 1 if EXP returns an aggregate value, for which an address
   must be passed to the function or returned by the function.  */

int
aggregate_value_p (exp)
     tree exp;
{
  if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
    return 1;
  if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
    return 1;
  if (flag_pcc_struct_return
      && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
	  || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
    return 1;
  return 0;
}

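/* Illustrative sketch (hypothetical caller): deciding whether a function's
   value needs a return slot in memory.  */
#if 0
  if (aggregate_value_p (DECL_RESULT (fndecl)))
    current_function_returns_struct = 1;  /* pass an address for the value */
#endif
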
/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;
  return change_address (ref, VOIDmode,
			 memory_address (GET_MODE (ref), XEXP (ref, 0)));
}

3920 /* Assign RTL expressions to the function's parameters.
3921 This may involve copying them into registers and using
3922 those registers as the RTL for them. */
3925 assign_parms (fndecl
)
3929 register rtx entry_parm
;
3930 register rtx stack_parm
;
3931 register CUMULATIVE_ARGS args_so_far
;
3932 enum machine_mode passed_mode
, nominal_mode
;
3933 /* Total space needed so far for args on the stack,
3934 given as a constant and a tree-expression. */
3935 struct args_size stack_args_size
;
3936 int first_parm_offset
= FIRST_PARM_OFFSET (fndecl
);
3937 tree fntype
= TREE_TYPE (fndecl
);
3938 /* This is used for the arg pointer when referring to stack args. */
3939 rtx internal_arg_pointer
;
3942 = list_length (DECL_ARGUMENTS (fndecl
)) + FIRST_PSEUDO_REGISTER
;
3944 /* Nonzero if function takes extra anonymous args.
3945 This means the last named arg must be on the stack
3946 right before the anonymous ones.
3947 Also nonzero if the first arg is named `__builtin_va_alist',
3948 which is used on some machines for old-fashioned non-ANSI varargs.h;
3949 this too should be stuck onto the stack as if it had arrived there. */
3951 = ((DECL_ARGUMENTS (fndecl
) != 0
3952 && DECL_NAME (DECL_ARGUMENTS (fndecl
))
3953 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl
))),
3954 "__builtin_va_alist")))
3956 (TYPE_ARG_TYPES (fntype
) != 0
3957 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3958 != void_type_node
)));
3959 int arg_pointer_copied
= 0;
3961 #if ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM
3962 internal_arg_pointer
= arg_pointer_rtx
;
3964 /* If the arg pointer reg is not a fixed reg,
3965 make a copy of it, and address parms via the copy. */
3966 if (fixed_regs
[ARG_POINTER_REGNUM
])
3967 internal_arg_pointer
= arg_pointer_rtx
;
3970 internal_arg_pointer
= copy_to_reg (arg_pointer_rtx
);
3971 arg_pointer_copied
= 1;
  stack_args_size.constant = 0;
  stack_args_size.var = 0;

  /* If struct value address comes on the stack, count it in size of args.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && GET_CODE (struct_value_incoming_rtx) == MEM)
    stack_args_size.constant += GET_MODE_SIZE (Pmode);

  parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
  bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));

  INIT_CUMULATIVE_ARGS (args_so_far, fntype);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm))
    {
      int aggregate
	= (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
      struct args_size stack_offset;
      rtx stack_offset_rtx;
      enum direction where_pad;
      /* Extra bytes to add in after parameter is assigned, in
	 case where argument cannot be assigned an offsetted
	 location.  For example, BLKmode parameters cannot be
	 other than on a word boundary (no matter the size)
	 because `access_parm_map' does not know how to handle them.  */
      int extra = 0;

      DECL_OFFSET (parm) = -1;

      if (TREE_TYPE (parm) == error_mark_node
	  /* This can happen after weird syntax errors
	     or if an enum type is defined among the parms.  */
	  || TREE_CODE (parm) != PARM_DECL
	  || DECL_ARG_TYPE (parm) == NULL)
	{
	  DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
	  TREE_USED (parm) = 1;
	  continue;
	}
      /* Find mode of arg as it is passed, and mode of arg
	 as it should be during execution of this function.  */
      passed_mode = TYPE_MODE (DECL_ARG_TYPE (parm));
      nominal_mode = TYPE_MODE (TREE_TYPE (parm));

      /* Get this parm's offset as an rtx.  */
      stack_offset = stack_args_size;
      stack_offset.constant += first_parm_offset;
      /* If this argument needs more than the usual parm alignment, do
	 extrinsic padding to reach that alignment.  */

#ifdef MAX_PARM_BOUNDARY
      /* If MAX_PARM_BOUNDARY is not defined, it means that the usual
	 alignment requirements are relaxed for parms, and that no parm
	 needs more alignment than PARM_BOUNDARY, regardless of data type.  */

      if (PARM_BOUNDARY < TYPE_ALIGN (DECL_ARG_TYPE (parm)))
	{
	  int boundary = PARM_BOUNDARY;

	  /* Determine the boundary to pad up to.  */
	  if (TYPE_ALIGN (DECL_ARG_TYPE (parm)) > boundary)
	    boundary = TYPE_ALIGN (DECL_ARG_TYPE (parm));
	  if (boundary > MAX_PARM_BOUNDARY)
	    boundary = MAX_PARM_BOUNDARY;

	  /* If the previous args don't reach such a boundary,
	     advance to the next one.  */
	  boundary /= BITS_PER_UNIT;
	  stack_offset.constant += boundary - 1;
	  stack_offset.constant &= ~(boundary - 1);
	  stack_args_size.constant += boundary - 1;
	  stack_args_size.constant &= ~(boundary - 1);

	  if (stack_offset.var != 0)
	    abort ();			/* This case not implemented yet */
	}
#endif /* MAX_PARM_BOUNDARY */
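
      /* The add-then-mask pairs above are the standard power-of-two
	 round-up: e.g. with a BOUNDARY of 8 bytes and
	 stack_offset.constant == 13, adding 7 gives 20, and 20 & ~7
	 == 16, the next multiple of 8 at or above 13.  This is valid
	 only because the boundary is a power of 2.  */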
      /* Find out if the parm needs intrinsic padding (up to PARM_BOUNDARY),
	 and whether above or below.  */

      where_pad
	= FUNCTION_ARG_PADDING (passed_mode,
				expand_expr (size_in_bytes (DECL_ARG_TYPE (parm)),
					     0, VOIDmode, 0));

      /* If arg should be padded below, adjust the stack address upward.
	 This padding is considered part of the space occupied by the
	 argument.  It pads only up to PARM_BOUNDARY, and it does not
	 depend on the previous arguments, since they are assumed to
	 occupy a multiple of PARM_BOUNDARY.  */

      if (where_pad == downward)
	{
	  if (passed_mode != BLKmode)
	    {
	      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
		stack_offset.constant
		  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
		       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
		      - GET_MODE_SIZE (passed_mode));
	    }
	  else
	    {
	      tree sizetree = size_in_bytes (DECL_ARG_TYPE (parm));
	      if (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT != PARM_BOUNDARY)
		{
		  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
		  tree s1 = convert_units (sizetree, BITS_PER_UNIT, PARM_BOUNDARY);
		  tree s2 = convert_units (s1, PARM_BOUNDARY, BITS_PER_UNIT);

		  ADD_PARM_SIZE (stack_offset, s2);
		  SUB_PARM_SIZE (stack_offset, sizetree);
		  extra = stack_offset.constant % UNITS_PER_WORD;
		  stack_offset.constant -= extra;
		}
	    }
	}
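
      /* Worked example of the non-BLKmode case above, assuming
	 BITS_PER_UNIT == 8 and PARM_BOUNDARY == 32: a 16-bit (HImode)
	 arg occupies a 4-byte slot, and ((16 + 31) / 32) * 32 / 8 - 2
	 == 2 bytes are added to its offset, so the value sits in the
	 last 2 bytes of the slot, as downward padding requires.  */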
      stack_offset_rtx = ARGS_SIZE_RTX (stack_offset);

      /* Determine parm's home in the stack,
	 in case it arrives in the stack or we should pretend it did.  */

      stack_parm
	= gen_rtx (MEM, passed_mode,
		   memory_address (passed_mode,
				   gen_rtx (PLUS, Pmode,
					    internal_arg_pointer,
					    stack_offset_rtx)));

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (stack_parm) = aggregate;
      /* Let machine desc say which reg (if any) the parm arrives in.
	 0 means it arrives on the stack.  */

      entry_parm = 0;
      /* Variable-size args, and args following such, are never in regs.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (parm))) == INTEGER_CST
	  && stack_offset.var == 0)
	{
	  /* Set LAST_NAMED if this is last named arg before some
	     anonymous args.  We treat it as if it were anonymous too.  */
	  int last_named = (TREE_CHAIN (parm) == 0 && vararg);
#ifdef FUNCTION_INCOMING_ARG
	  entry_parm
	    = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
				     DECL_ARG_TYPE (parm), ! last_named);
#else
	  entry_parm
	    = FUNCTION_ARG (args_so_far, passed_mode, DECL_ARG_TYPE (parm),
			    ! last_named);
#endif
	}
      /* If this parm was passed part in regs and part in memory,
	 pretend it arrived entirely in memory
	 by pushing the register-part onto the stack.

	 In the special case of a DImode or DFmode that is split,
	 we could put it together in a pseudoreg directly,
	 but for now that's not worth bothering with.  */

      if (entry_parm && GET_CODE (entry_parm) == REG)
	{
	  int i;
	  int nregs = 0;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	  nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
					      DECL_ARG_TYPE (parm), 1);
#endif
#if 0 /* Replaced by new calling convention
	 which actually passes these args on the stack.  */
	  /* If this is the last named arg and anonymous args follow,
	     likewise pretend this arg arrived on the stack
	     so varargs can find the anonymous args following it.  */
	  if (TREE_CHAIN (parm) == 0 && vararg)
	    {
	      if (GET_MODE (entry_parm) == BLKmode)
		nregs = GET_MODE_SIZE (GET_MODE (entry_parm)) / UNITS_PER_WORD;
	      else
		nregs = (int_size_in_bytes (DECL_ARG_TYPE (parm))
			 / UNITS_PER_WORD);
	    }
#endif

	  if (nregs > 0)
	    {
	      current_function_pretend_args_size
		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
		   / (PARM_BOUNDARY / BITS_PER_UNIT)
		   * (PARM_BOUNDARY / BITS_PER_UNIT));

	      for (i = 0; i < nregs; i++)
		emit_move_insn (gen_rtx (MEM, SImode,
					 plus_constant (XEXP (stack_parm, 0),
							i * GET_MODE_SIZE (SImode))),
				gen_rtx (REG, SImode, REGNO (entry_parm) + i));
	      entry_parm = stack_parm;
	    }
	}
      /* If we didn't decide this parm came in a register,
	 by default it came on the stack.  */
      if (entry_parm == 0)
	entry_parm = stack_parm;

      /* For a stack parm, record in DECL_OFFSET the arglist offset
	 of the parm at the time it is passed (before conversion).  */
      if (entry_parm == stack_parm)
	DECL_OFFSET (parm) = stack_offset.constant * BITS_PER_UNIT;
      /* If there is actually space on the stack for this parm,
	 count it in stack_args_size; otherwise set stack_parm to 0
	 to indicate there is no preallocated stack slot for the parm.  */

      if (entry_parm == stack_parm
#ifdef REG_PARM_STACK_SPACE
	  /* On some machines, even if a parm value arrives in a register
	     there is still an (uninitialized) stack slot allocated for it.  */
	  || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
	  )
	{
	  tree sizetree = size_in_bytes (DECL_ARG_TYPE (parm));
	  if (where_pad != none
	      && TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT != PARM_BOUNDARY)
	    {
	      /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	      tree s1 = convert_units (sizetree, BITS_PER_UNIT, PARM_BOUNDARY);
	      sizetree = convert_units (s1, PARM_BOUNDARY, BITS_PER_UNIT);
	    }
	  /* Add it in.  */
	  ADD_PARM_SIZE (stack_args_size, sizetree);
	}
      else
	/* No stack slot was pushed for this parm.  */
	stack_parm = 0;
      /* Now adjust STACK_PARM to the mode and precise location
	 where this parameter should live during execution,
	 if we discover that it must live in the stack during execution.
	 To make debuggers happier on big-endian machines, we store
	 the value in the last bytes of the space available.  */

      if (nominal_mode != BLKmode && nominal_mode != passed_mode
	  && stack_parm != 0)
	{
#ifdef BYTES_BIG_ENDIAN
	  if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
	    {
	      stack_offset.constant
		+= GET_MODE_SIZE (passed_mode)
		   - GET_MODE_SIZE (nominal_mode);
	      stack_offset_rtx = ARGS_SIZE_RTX (stack_offset);
	    }
#endif

	  stack_parm
	    = gen_rtx (MEM, nominal_mode,
		       memory_address (nominal_mode,
				       gen_rtx (PLUS, Pmode,
						internal_arg_pointer,
						stack_offset_rtx)));

	  /* If this is a memory ref that contains aggregate components,
	     mark it as such for cse and loop optimize.  */
	  MEM_IN_STRUCT_P (stack_parm) = aggregate;
	}
      /* If there is rounding to do for a BLKmode parameter,
	 add it in here, since STACK_OFFSET is not used for the
	 rest of this iteration.  */
      stack_offset.constant += extra;

      /* ENTRY_PARM is an RTX for the parameter as it arrives,
	 in the mode in which it arrives.
	 STACK_PARM is an RTX for a stack slot where the parameter can live
	 during the function (in case we want to put it there).
	 STACK_PARM is 0 if no stack slot was pushed for it.

	 Now output code if necessary to convert ENTRY_PARM to
	 the type in which this function declares it,
	 and store that result in an appropriate place,
	 which may be a pseudo reg, may be STACK_PARM,
	 or may be a local stack slot if STACK_PARM is 0.

	 Set DECL_RTL to that place.  */

      if (nominal_mode == BLKmode)
	{
	  /* If a BLKmode arrives in registers, copy it to a stack slot.  */
	  if (GET_CODE (entry_parm) == REG)
	    {
	      if (stack_parm == 0)
		stack_parm
		  = assign_stack_local (GET_MODE (entry_parm),
					int_size_in_bytes (TREE_TYPE (parm)));

	      move_block_from_reg (REGNO (entry_parm), stack_parm,
				   ((int_size_in_bytes (TREE_TYPE (parm))
				     + UNITS_PER_WORD - 1)
				    / UNITS_PER_WORD));
	    }

	  /* If this function uses varargs, and `__builtin_saveregs'
	     can clobber this stack location, then protect it.  */
	  if (vararg)
	    {
	      rtx pseudo_parm;
#ifdef FUNCTION_INCOMING_ARG
	      pseudo_parm
		= FUNCTION_INCOMING_ARG (args_so_far, SImode,
					 integer_type_node, 1);
#else
	      pseudo_parm
		= FUNCTION_ARG (args_so_far, SImode,
				integer_type_node, 1);
#endif
	      if (pseudo_parm && GET_CODE (pseudo_parm) == REG)
		{
		  push_to_sequence (save_from_saveregs);
		  move_block_to_reg (REGNO (pseudo_parm), stack_parm,
				     int_size_in_bytes (TREE_TYPE (parm))
				     / UNITS_PER_WORD);
		  save_from_saveregs = get_insns ();
		  end_sequence ();
		}
	    }

	  DECL_RTL (parm) = stack_parm;
	}
      else if (! ((obey_regdecls && ! TREE_REGDECL (parm)
		   && ! TREE_INLINE (fndecl))
		  /* layout_decl may set this.  */
		  || TREE_ADDRESSABLE (parm)
		  || TREE_VOLATILE (parm)
		  /* If -ffloat-store specified, don't put explicit
		     float variables into registers.  */
		  || (flag_float_store
		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
	{
	  /* Store the parm in a pseudoregister during the function.  */
	  register rtx parmreg = gen_reg_rtx (nominal_mode);

	  REG_USERVAR_P (parmreg) = 1;
	  DECL_RTL (parm) = parmreg;

	  /* Copy the value into the register.  */
	  if (GET_MODE (parmreg) != GET_MODE (entry_parm))
	    convert_move (parmreg, validize_mem (entry_parm), 0);
	  else
	    emit_move_insn (parmreg, validize_mem (entry_parm));

	  /* In any case, record the parm's desired stack location
	     in case we later discover it must live in the stack.  */
	  if (REGNO (parmreg) >= nparmregs)
	    {
	      rtx *new;
	      int old_nparmregs = nparmregs;

	      nparmregs = REGNO (parmreg) + 5;
	      new = (rtx *) oballoc (nparmregs * sizeof (rtx));
	      bzero (new, nparmregs * sizeof (rtx));
	      /* Copy only the slots that exist in the old vector;
		 copying the new, larger size would read past its end.  */
	      bcopy (parm_reg_stack_loc, new, old_nparmregs * sizeof (rtx));
	      parm_reg_stack_loc = new;
	    }
	  parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
	  /* Mark the register as eliminable if we did no conversion
	     and it was copied from memory at a fixed offset,
	     and the arg pointer was not copied to a pseudo-reg.
	     If the arg pointer is a pseudo reg, such memory-equivalences
	     as we make here would screw up life analysis for it.  */
	  if (nominal_mode == passed_mode
	      && GET_CODE (entry_parm) == MEM
	      && stack_offset.var == 0
	      && ! arg_pointer_copied)
	    REG_NOTES (get_last_insn ())
	      = gen_rtx (EXPR_LIST, REG_EQUIV,
			 entry_parm, REG_NOTES (get_last_insn ()));
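
	  /* The note built above looks like
		(expr_list:REG_EQUIV (mem:SI ...) ...)
	     attached to the insn that loaded the pseudo (an editor's
	     illustration).  It records that the pseudo is equivalent
	     to the parm's incoming stack slot, so if register
	     allocation cannot give the pseudo a hard register, that
	     slot can be used directly instead of allocating a fresh
	     one.  */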
	  /* For pointer data type, suggest pointer register.  */
	  if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
	    mark_reg_pointer (parmreg);
	}
      else
	{
	  /* Value must be stored in the stack slot STACK_PARM
	     during function execution.  */

	  if (passed_mode != nominal_mode)
	    /* Conversion is required.  */
	    entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);

	  if (entry_parm != stack_parm)
	    {
	      if (stack_parm == 0)
		stack_parm
		  = assign_stack_local (GET_MODE (entry_parm),
					GET_MODE_SIZE (GET_MODE (entry_parm)));
	      emit_move_insn (validize_mem (stack_parm),
			      validize_mem (entry_parm));
	    }

	  DECL_RTL (parm) = stack_parm;
	  frame_pointer_needed = 1;
	}
      if (TREE_VOLATILE (parm))
	MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
      if (TREE_READONLY (parm))
	RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;

      /* Update info on where next arg arrives in registers.  */

      FUNCTION_ARG_ADVANCE (args_so_far, passed_mode, DECL_ARG_TYPE (parm), 1);
    }

  max_parm_reg = max_reg_num ();
  last_parm_insn = get_last_insn ();

  current_function_args_size = stack_args_size.constant;
}
/* Allocation of space for returned structure values.
   During the rtl generation pass, `get_structure_value_addr'
   is called from time to time to request the address of a block in our
   stack frame in which called functions will store the structures
   they are returning.  The same space is used for all of these blocks.

   We allocate these blocks like stack locals.  We keep reusing
   the same block until a bigger one is needed.  */

/* Length in bytes of largest structure value returned by
   any function called so far in this function.  */
static int max_structure_value_size;

/* An rtx for the addr we are currently using for structure values.
   This is typically (PLUS (REG:SI stackptr) (CONST_INT...)).  */
static rtx structure_value;
rtx
get_structure_value_addr (sizex)
     rtx sizex;
{
  register int size;

  if (GET_CODE (sizex) != CONST_INT)
    abort ();
  size = INTVAL (sizex);

  /* Round up to a multiple of the main allocation unit.  */
  size = (((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
	   / (BIGGEST_ALIGNMENT / BITS_PER_UNIT))
	  * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));

  /* If this size is bigger than space we know to use,
     get a bigger piece of space.  */
  if (size > max_structure_value_size)
    {
      max_structure_value_size = size;
      structure_value = assign_stack_local (BLKmode, size);
      if (GET_CODE (structure_value) == MEM)
	structure_value = XEXP (structure_value, 0);
    }

  return structure_value;
}
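
/* Example of the round-up above: if BIGGEST_ALIGNMENT is 64 bits and
   BITS_PER_UNIT is 8, the allocation unit is 8 bytes, so a request for
   20 bytes is rounded to ((20 + 7) / 8) * 8 == 24 bytes.  A later
   request for 16 bytes then reuses the same 24-byte block, since only
   a larger request allocates a new one.  */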
/* Walk the tree of LET_STMTs describing the binding levels within a function
   and warn about uninitialized variables.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;
  for (decl = STMT_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
	     because assigning the fields one by one can fail to convince
	     flow.c that the entire aggregate was initialized.
	     Unions are troublesome because members may be shorter.  */
	  && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	  && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
	  && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "`%s' may be used uninitialized in this function");
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "variable `%s' may be clobbered by `longjmp'");
    }
  for (sub = STMT_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
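
/* For example, given

	int f (int x) { int y; if (x > 0) y = 1; return y; }

   flow analysis finds a path on which the pseudo register for `y' is
   used before being set, so `regno_uninitialized' is nonzero and `y'
   draws the warning.  Since these checks depend on the flow pass,
   they are made only when optimizing.  */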
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */

void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  for (decl = STMT_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& ! TREE_REGDECL (decl))
      put_var_into_stack (decl);
  for (sub = STMT_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
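
/* For example, in

	jmp_buf env;
	int f () { int n = 0; setjmp (env); n++; g (); return n; }

   `n' is forced into a stack slot: if `g' calls longjmp, registers are
   restored to their values at the setjmp, and a copy of `n' living
   only in a register would silently lose the increment.  Vars declared
   `register' are taken at the programmer's word and left alone.  */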
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr)
     tree subr;
{
  this_function = subr;
  cse_not_expected = ! optimize;

  /* We have not yet found a reason why a frame pointer cannot
     be omitted for this function in particular, but maybe we know
     a priori that it is required.
     `flag_omit_frame_pointer' has its main effect here.  */
  frame_pointer_needed = FRAME_POINTER_REQUIRED || ! flag_omit_frame_pointer;
  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* No invalid stack slots have been made yet.  */
  invalid_stack_slot = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = FIRST_PSEUDO_REGISTER;

  /* Initialize the RTL mechanism.  */
  init_emit (write_symbols);

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  init_const_rtx_hash_table ();

  /* Decide whether function should try to pop its args on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (subr));

  current_function_name = (char *) lang_printable_name (subr);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (DECL_CONTEXT (current_function_decl) != 0
       && TREE_CODE (DECL_CONTEXT (current_function_decl)) == LET_STMT);

  /* Set if a call to setjmp is seen.  */

  current_function_calls_setjmp = 0;
  current_function_calls_alloca = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;

  /* No space assigned yet for structure values.  */
  max_structure_value_size = 0;
  structure_value = 0;

  /* We are not currently within any block, conditional, loop or case.
     @@ No longer true.  We are within the block for the parms.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* No stack slots allocated yet.  */
  frame_offset = STARTING_FRAME_OFFSET;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No parameters to protect from `__builtin_saveregs' yet.  */
  save_from_saveregs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;
  current_function_pretend_args_size = 0;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (DECL_SOURCE_FILE (subr), DECL_SOURCE_LINE (subr));
  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (0, NOTE_INSN_DELETED);
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (0, NOTE_INSN_FUNCTION_BEG);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
	current_function_returns_pcc_struct = 1;
      else
#endif
	current_function_returns_struct = 1;
    }
}
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	  current_function_returns_pcc_struct = 1;
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.  */
	  value_address = gen_reg_rtx (Pmode);
	  emit_move_insn (value_address, struct_value_incoming_rtx);
	  current_function_returns_struct = 1;
	}
      DECL_RTL (DECL_RESULT (subr))
	= gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
		   value_address);
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */
      DECL_RTL (DECL_RESULT (subr))
	= gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
      TREE_REGDECL (DECL_RESULT (subr)) = 1;
    }
  else
    {
      /* Scalar, returned in a register.  */
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      current_function_returns_pointer
	= (TREE_CODE (DECL_RESULT_TYPE (subr)) == POINTER_TYPE);

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  TREE_REGDECL (DECL_RESULT (subr)) = 1;
	}
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Not yet defined in GCC 1.37.1.  */
  /* Tell final that the parameters are in the final places
     (either on the stack or in registers).  */
  emit_note ((char *) 0, NOTE_INSN_PARMS_HOMED);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (0, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    for (i = FIRST_PSEUDO_REGISTER; i < max_parm_reg; i++)
      use_variable (regno_reg_rtx[i]);

  /* After the parm initializations is where the tail-recursion label
     should go, if we end up needing one.  */
  tail_recursion_reentry = get_last_insn ();
  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = get_pending_sizes (); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), 0, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.
     There normally is one anyway, from the following statement,
     but there could fail to be one if there is no newline here.  */
  force_next_line_note ();
}
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.  */

/* ??? Nobody seems to emit the cleanup_label and the cleanups themselves.
   !!! Not true.  finish_function does this in cplus-decl.c.  */

void
expand_function_end (filename, line)
     char *filename;
     int line;
{
  register int i;
  tree decl;
  extern rtx sequence_stack;

#if 0 /* I think unused parms are legitimate enough.  */
  /* Warn about unused parms.  */

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
      warning_with_decl (decl, "unused parameter `%s'");
#endif

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (sequence_stack)
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;
  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = FIRST_PSEUDO_REGISTER; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (0, NOTE_INSN_FUNCTION_END);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = gen_reg_rtx (Pmode);
	emit_insn_after (gen_rtx (SET, VOIDmode, tem, stack_pointer_rtx),
			 parm_birth_insn);
	emit_insn (gen_rtx (SET, VOIDmode, stack_pointer_rtx, tem));
      }
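
/* Sketch of the effect: for

	int f (int n) { char *p = (char *) alloca (n); ... }

   the stack pointer on entry is copied into the pseudo TEM just after
   the parms are born, and `SP = TEM' is emitted here at exit, so the
   function unwinds whatever alloca pushed even on machines whose exit
   sequence needs the stack pointer to be exact.  */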
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;
#ifdef FUNCTION_OUTGOING_VALUE
      outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      outgoing
	= hard_function_value (build_pointer_type (type),
			       current_function_decl);
#endif

#if 1
      /* If this works, leave it in.  */
      /* This is not really the function value,
	 and it confuses integrate.c around line 1211.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;
#endif

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }
  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    emit_jump_insn (gen_return ());
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  fixup_gotos (0, 0, 0, get_insns (), 0);
}
void
init_stmt ()
{
  obstack_init (&stmt_obstack);
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
}