/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "gvarargs.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
static tree cleanups_this_call;

/* Nonzero means current function may call alloca
   as a subroutine.  (__builtin_alloca does not count.)  */
int may_call_alloca;
rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_call ();
static void emit_call_1 ();
static rtx prepare_call_address ();
static rtx expand_builtin ();
static rtx compare ();
static rtx compare_constants ();
static rtx compare1 ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
/* Table indexed by tree code giving 1 if the code is for a
   comparison operation, or anything that is most easily
   computed with a conditional branch.

   We include tree.def to give it the proper length.
   The contents thus created are irrelevant.
   The real contents are initialized in init_comparisons.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) 0,

static char comparison_code[] = {
#include "tree.def"
};
#undef DEFTREECODE
/* This is run once per compilation.  */

void
init_comparisons ()
{
  comparison_code[(int) EQ_EXPR] = 1;
  comparison_code[(int) NE_EXPR] = 1;
  comparison_code[(int) LT_EXPR] = 1;
  comparison_code[(int) GT_EXPR] = 1;
  comparison_code[(int) LE_EXPR] = 1;
  comparison_code[(int) GE_EXPR] = 1;
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();
  may_call_alloca = 0;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, 0, 0, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);
  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
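
#if 0
/* Illustrative sketch, not part of the original file: the intended
   protocol for the increment queue above.  VAR and BODY are
   placeholders; BODY is an insn pattern suitable for emit_insn that
   performs the increment of VAR.  */
rtx q = enqueue_insn (var, body);     /* Defer the increment of VAR.  */
rtx old = protect_from_queue (q, 0);  /* VAR itself, or a copy holding
                                         its pre-increment value.  */
emit_queue ();                        /* Now output all queued increments.  */
#endif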
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  int extending = (int) to_mode > (int) from_mode;

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && extending)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && ! extending)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, (extending
                                                      ? "__extendsfdf2"
                                                      : "__truncdfsf2")), 0,
                         GET_MODE (to), 1,
                         from, (extending ? SFmode : DFmode));
      emit_move_insn (to, hard_libcall_value (GET_MODE (to)));
      return;
    }
  /* Now both modes are integers.  */

  if (to_mode == DImode)
    {
      if (unsignedp)
        {
#ifdef HAVE_zero_extendsidi2
          if (HAVE_zero_extendsidi2 && from_mode == SImode)
            emit_unop_insn (CODE_FOR_zero_extendsidi2, to, from, ZERO_EXTEND);
          else
#endif
#ifdef HAVE_zero_extendhidi2
          if (HAVE_zero_extendhidi2 && from_mode == HImode)
            emit_unop_insn (CODE_FOR_zero_extendhidi2, to, from, ZERO_EXTEND);
          else
#endif
#ifdef HAVE_zero_extendqidi2
          if (HAVE_zero_extendqidi2 && from_mode == QImode)
            emit_unop_insn (CODE_FOR_zero_extendqidi2, to, from, ZERO_EXTEND);
          else
#endif
#ifdef HAVE_zero_extendsidi2
          if (HAVE_zero_extendsidi2)
            {
              convert_move (gen_lowpart (SImode, to), from, unsignedp);
              emit_unop_insn (CODE_FOR_zero_extendsidi2, to,
                              gen_lowpart (SImode, to), ZERO_EXTEND);
            }
          else
#endif
            {
              emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
              convert_move (gen_lowpart (SImode, to), from, unsignedp);
              emit_clr_insn (gen_highpart (SImode, to));
            }
        }
#ifdef HAVE_extendsidi2
      else if (HAVE_extendsidi2 && from_mode == SImode)
        emit_unop_insn (CODE_FOR_extendsidi2, to, from, SIGN_EXTEND);
#endif
#ifdef HAVE_extendhidi2
      else if (HAVE_extendhidi2 && from_mode == HImode)
        emit_unop_insn (CODE_FOR_extendhidi2, to, from, SIGN_EXTEND);
#endif
#ifdef HAVE_extendqidi2
      else if (HAVE_extendqidi2 && from_mode == QImode)
        emit_unop_insn (CODE_FOR_extendqidi2, to, from, SIGN_EXTEND);
#endif
#ifdef HAVE_extendsidi2
      else if (HAVE_extendsidi2)
        {
          convert_move (gen_lowpart (SImode, to), from, unsignedp);
          emit_unop_insn (CODE_FOR_extendsidi2, to,
                          gen_lowpart (SImode, to), SIGN_EXTEND);
        }
#endif
#ifdef HAVE_slt
      else if (HAVE_slt && insn_operand_mode[(int) CODE_FOR_slt][0] == SImode)
        {
          emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
          convert_move (gen_lowpart (SImode, to), from, unsignedp);
          emit_insn (gen_slt (gen_highpart (SImode, to)));
        }
#endif
      else
        {
          register rtx label = gen_label_rtx ();

          emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
          emit_clr_insn (gen_highpart (SImode, to));
          convert_move (gen_lowpart (SImode, to), from, unsignedp);
          emit_cmp_insn (gen_lowpart (SImode, to),
                         gen_rtx (CONST_INT, VOIDmode, 0),
                         0, 0, 0);
          NO_DEFER_POP;
          emit_jump_insn (gen_bge (label));
          expand_unop (SImode, one_cmpl_optab,
                       gen_highpart (SImode, to), gen_highpart (SImode, to),
                       0);
          emit_label (label);
          OK_DEFER_POP;
        }
      return;
    }

  if (from_mode == DImode)
    {
      convert_move (to, gen_lowpart (SImode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  if (to_mode == SImode && from_mode == HImode)
    {
      if (unsignedp)
        {
#ifdef HAVE_zero_extendhisi2
          if (HAVE_zero_extendhisi2)
            emit_unop_insn (CODE_FOR_zero_extendhisi2, to, from, ZERO_EXTEND);
          else
#endif
            abort ();
        }
      else
        {
#ifdef HAVE_extendhisi2
          if (HAVE_extendhisi2)
            emit_unop_insn (CODE_FOR_extendhisi2, to, from, SIGN_EXTEND);
          else
#endif
            abort ();
        }
      return;
    }

  if (to_mode == SImode && from_mode == QImode)
    {
      if (unsignedp)
        {
#ifdef HAVE_zero_extendqisi2
          if (HAVE_zero_extendqisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendqisi2, to, from, ZERO_EXTEND);
              return;
            }
#endif
#if defined (HAVE_zero_extendqihi2) && defined (HAVE_extendhisi2)
          if (HAVE_zero_extendqihi2 && HAVE_extendhisi2)
            {
              register rtx temp = gen_reg_rtx (HImode);
              emit_unop_insn (CODE_FOR_zero_extendqihi2, temp, from, ZERO_EXTEND);
              emit_unop_insn (CODE_FOR_extendhisi2, to, temp, SIGN_EXTEND);
              return;
            }
#endif
        }
      else
        {
#ifdef HAVE_extendqisi2
          if (HAVE_extendqisi2)
            {
              emit_unop_insn (CODE_FOR_extendqisi2, to, from, SIGN_EXTEND);
              return;
            }
#endif
#if defined (HAVE_extendqihi2) && defined (HAVE_extendhisi2)
          if (HAVE_extendqihi2 && HAVE_extendhisi2)
            {
              register rtx temp = gen_reg_rtx (HImode);
              emit_unop_insn (CODE_FOR_extendqihi2, temp, from, SIGN_EXTEND);
              emit_unop_insn (CODE_FOR_extendhisi2, to, temp, SIGN_EXTEND);
              return;
            }
#endif
        }
      abort ();
    }

  if (to_mode == HImode && from_mode == QImode)
    {
      if (unsignedp)
        {
#ifdef HAVE_zero_extendqihi2
          if (HAVE_zero_extendqihi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendqihi2, to, from, ZERO_EXTEND);
              return;
            }
#endif
        }
      else
        {
#ifdef HAVE_extendqihi2
          if (HAVE_extendqihi2)
            {
              emit_unop_insn (CODE_FOR_extendqihi2, to, from, SIGN_EXTEND);
              return;
            }
#endif
        }
      abort ();
    }
#if 0 /* This seems to be redundant with code 100 lines up.  */

  /* Now we are truncating an integer to a smaller one.
     If the result is a temporary, we might as well just copy it,
     since only the low-order part of the result needs to be valid
     and it is valid with no change.  */

  if (GET_CODE (to) == REG)
    {
      if (GET_CODE (from) == REG)
        {
          emit_move_insn (to, gen_lowpart (GET_MODE (to), from));
          return;
        }
      else if (GET_CODE (from) == SUBREG)
        {
          from = copy_rtx (from);
          /* This is safe since FROM is not more than one word.  */
          PUT_MODE (from, GET_MODE (to));
          emit_move_insn (to, from);
          return;
        }
#ifndef BYTES_BIG_ENDIAN
      else if (GET_CODE (from) == MEM)
        {
          register rtx addr = XEXP (from, 0);
          if (memory_address_p (GET_MODE (to), addr))
            {
              emit_move_insn (to, gen_rtx (MEM, GET_MODE (to), addr));
              return;
            }
        }
#endif /* not BYTES_BIG_ENDIAN */
    }
#endif /* 0 */

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      abort ();
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      abort ();
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      abort ();
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;
  if (mode == GET_MODE (x))
    return x;
  if (integer_mode_p (mode)
      && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
      && ! (GET_CODE (x) == MEM && MEM_VOLATILE_P (x)))
    return gen_lowpart (mode, x);
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

int
integer_mode_p (mode)
     enum machine_mode mode;
{
  return (int) mode > (int) VOIDmode && (int) mode <= (int) TImode;
}
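
#if 0
/* Illustrative sketch, not part of the original file: FROM is assumed
   to be an SImode rtx.  Narrowing through convert_to_mode may simply
   return a low part of FROM in place, with no insns emitted, while
   widening allocates a pseudo and emits a conversion via convert_move.  */
rtx narrow = convert_to_mode (HImode, from, unsignedp);  /* may emit nothing */
rtx wide = convert_to_mode (DImode, from, unsignedp);    /* emits insns */
#endif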
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};
static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

#ifdef STRICT_ALIGNMENT
  if (align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
#else
  align = MOVE_MAX;
#endif

#ifdef HAVE_movti
  if (HAVE_movti && align >= GET_MODE_SIZE (TImode))
    move_by_pieces_1 (gen_movti, TImode, &data);
#endif
#ifdef HAVE_movdi
  if (HAVE_movdi && align >= GET_MODE_SIZE (DImode))
    move_by_pieces_1 (gen_movdi, DImode, &data);
#endif
#ifdef HAVE_movsi
  if (align >= GET_MODE_SIZE (SImode))
    move_by_pieces_1 (gen_movsi, SImode, &data);
#endif
#ifdef HAVE_movhi
  if (HAVE_movhi && align >= GET_MODE_SIZE (HImode))
    move_by_pieces_1 (gen_movhi, HImode, &data);
#endif
#ifdef HAVE_movqi
  move_by_pieces_1 (gen_movqi, QImode, &data);
#else
  movqi instruction required in machine description
#endif
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;

#ifdef STRICT_ALIGNMENT
  if (align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
#else
  align = MOVE_MAX;
#endif

#ifdef HAVE_movti
  if (HAVE_movti && align >= GET_MODE_SIZE (TImode))
    n_insns += l / GET_MODE_SIZE (TImode), l %= GET_MODE_SIZE (TImode);
#endif
#ifdef HAVE_movdi
  if (HAVE_movdi && align >= GET_MODE_SIZE (DImode))
    n_insns += l / GET_MODE_SIZE (DImode), l %= GET_MODE_SIZE (DImode);
#endif
#ifdef HAVE_movsi
  if (HAVE_movsi && align >= GET_MODE_SIZE (SImode))
    n_insns += l / GET_MODE_SIZE (SImode), l %= GET_MODE_SIZE (SImode);
#endif
#ifdef HAVE_movhi
  if (HAVE_movhi && align >= GET_MODE_SIZE (HImode))
    n_insns += l / GET_MODE_SIZE (HImode), l %= GET_MODE_SIZE (HImode);
#endif
  n_insns += l;

  return n_insns;
}
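
/* Worked example (added for illustration): with movsi, movhi and movqi
   available and ALIGN = 4, a block of L = 11 bytes costs 11/4 = 2 SImode
   moves leaving 3 bytes, then 3/2 = 1 HImode move leaving 1 byte, then
   1 QImode move: 4 insns in all.  Since 4 is below the default
   MOVE_RATIO of 15, emit_block_move below would copy such a block by
   pieces rather than with a string-move insn or a library call.  */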
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_sub2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_sub2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

static void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
          < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
        {
          emit_insn (gen_movstrqi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align)));
          return;
        }
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
        {
          emit_insn (gen_movstrhi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align)));
          return;
        }
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
        {
          emit_insn (gen_movstrsi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align)));
          return;
        }
#endif

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "memcpy"), 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         size, Pmode);
#else
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "bcopy"), 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         size, Pmode);
#endif
    }
}
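
#if 0
/* Illustrative sketch, not part of the original file: X and Y are
   assumed to be BLKmode MEM rtx's.  A 16-byte copy with known 4-byte
   alignment needs only 4 SImode moves, which is below MOVE_RATIO, so
   it would be open-coded by move_by_pieces instead of using a movstr
   pattern or a memcpy/bcopy library call.  */
emit_block_move (x, y, gen_rtx (CONST_INT, VOIDmode, 16), 4);
#endif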
/* Copy all or part of a BLKmode value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  if (GET_CODE (x) == CONST_DOUBLE && x != dconst0_rtx)
    x = force_const_double_mem (x);
  for (i = 0; i < nregs; i++)
    {
      if (GET_CODE (x) == REG)
        emit_move_insn (gen_rtx (REG, SImode, regno + i),
                        gen_rtx (SUBREG, SImode, x, i));
      else if (x == dconst0_rtx)
        emit_move_insn (gen_rtx (REG, SImode, regno + i),
                        const0_rtx);
      else
        emit_move_insn (gen_rtx (REG, SImode, regno + i),
                        gen_rtx (MEM, SImode,
                                 memory_address (SImode,
                                                 plus_constant (XEXP (x, 0),
                                                                i * GET_MODE_SIZE (SImode)))));
    }
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  for (i = 0; i < nregs; i++)
    {
      if (GET_CODE (x) == REG)
        emit_move_insn (gen_rtx (SUBREG, SImode, x, i),
                        gen_rtx (REG, SImode, regno + i));
      else
        emit_move_insn (gen_rtx (MEM, SImode,
                                 memory_address (SImode,
                                                 plus_constant (XEXP (x, 0),
                                                                i * GET_MODE_SIZE (SImode)))),
                        gen_rtx (REG, SImode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

static void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;
  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, regno + i)));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "memset"), 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#else
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "bzero"), 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if ((CONSTANT_P (y) || GET_CODE (y) == CONST_DOUBLE)
      && ! LEGITIMATE_CONSTANT_P (y))
    {
      y = force_const_mem (mode, y);
      if (! memory_address_p (mode, XEXP (y, 0)))
        y = gen_rtx (MEM, mode, memory_address (mode, XEXP (y, 0)));
    }

  if (mode == BLKmode)
    abort ();
  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
#if 0
  /* It turns out you get much better optimization (in cse and flow)
     if you define movdi and movdf instruction patterns
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (SImode))
    {
      register int count = GET_MODE_SIZE (mode) / GET_MODE_SIZE (SImode);
      register int i;
      if (GET_CODE (y) == CONST_DOUBLE && y != dconst0_rtx)
        y = force_const_double_mem (y);
      for (i = 0; i < count; i++)
        {
          rtx x1, y1;
          if (GET_CODE (x) == REG)
            x1 = gen_rtx (SUBREG, SImode, x, i);
          else
            x1 = gen_rtx (MEM, SImode,
                          memory_address (SImode,
                                          plus_constant (XEXP (x, 0),
                                                         i * GET_MODE_SIZE (SImode))));
          if (GET_CODE (y) == REG)
            y1 = gen_rtx (SUBREG, SImode, y, i);
          else if (y == dconst0_rtx)
            y1 = const0_rtx;
          else
            y1 = gen_rtx (MEM, SImode,
                          memory_address (SImode,
                                          plus_constant (XEXP (y, 0),
                                                         i * GET_MODE_SIZE (SImode))));
          emit_insn (gen_movsi (protect_from_queue (x1, 1), protect_from_queue (y1, 0)));
        }
    }
#endif
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be stack_pointer_rtx.

   The value we return does take account of STACK_POINTER_OFFSET.  */

rtx
push_block (size)
     rtx size;
{
  register rtx temp;
  if (CONSTANT_P (size) || GET_CODE (size) == REG)
    anti_adjust_stack (size);
  else
    anti_adjust_stack (copy_to_mode_reg (Pmode, size));

#ifdef STACK_GROWS_DOWNWARD
  temp = stack_pointer_rtx;
#else
  temp = gen_rtx (PLUS, Pmode,
                  stack_pointer_rtx,
                  negate_rtx (Pmode, size));
  if (GET_CODE (size) != CONST_INT)
    temp = force_operand (temp, 0);
#endif

#ifdef STACK_POINTER_OFFSET
  temp = plus_constant (temp, STACK_POINTER_OFFSET);
#endif /* STACK_POINTER_OFFSET */

  return memory_address (QImode, temp);
}
static rtx
gen_push_operand ()
{
  return gen_rtx (
#ifdef STACK_GROWS_DOWNWARD
                  PRE_DEC,
#else
                  PRE_INC,
#endif
                  Pmode,
                  stack_pointer_rtx);
}
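
#if 0
/* Illustrative sketch, not part of the original file: on a machine
   whose stack grows downward, gen_push_operand yields
   (pre_dec (reg sp)), so wrapping it in a MEM makes a move into it
   a push:  (set (mem:SI (pre_dec (reg sp))) X).  */
emit_move_insn (gen_rtx (MEM, SImode, gen_push_operand ()), x);
#endif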
/* Generate code to push X onto the stack, assuming it has mode MODE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

static void
emit_push_insn (x, mode, size, align, partial, reg, extra, args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, size);

  xinner = x = protect_from_queue (x, 0);
  if (extra)
    {
      if (args_addr == 0)
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.  */
          if (where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
        }
      else
        {
          /* If space already allocated, just adjust the address we use.  */
          if (where_pad == downward)
            args_so_far = plus_constant (args_so_far, extra);
        }
    }

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef FIRST_PARM_CALLER_OFFSET
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && args_addr == 0
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                        INTVAL (size) - used, align);
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = gen_rtx (CONST_INT, VOIDmode, INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     gen_rtx (CONST_INT, VOIDmode, used),
                                     0, 0, OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.  */
          if (! args_addr)
            temp = push_block (size);
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
          if (reg_mentioned_p (stack_pointer_rtx, temp))
            {
              /* Now that emit_library_call does force_operand
                 before pushing anything, preadjustment does not work.  */
              temp = copy_to_reg (temp);
#if 0
              /* Correct TEMP so it holds what will be a description of
                 the address to copy to, valid after one arg is pushed.  */
              int xsize = GET_MODE_SIZE (Pmode);
#ifdef PUSH_ROUNDING
              xsize = PUSH_ROUNDING (xsize);
#endif
              xsize = ((xsize + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                       / (PARM_BOUNDARY / BITS_PER_UNIT)
                       * (PARM_BOUNDARY / BITS_PER_UNIT));
#ifdef TARGET_MEM_FUNCTIONS
              /* If we are calling bcopy, we push one arg before TEMP.
                 If calling memcpy, we push two.  */
              xsize *= 2;
#endif
#ifdef STACK_GROWS_DOWNWARD
              temp = plus_constant (temp, xsize);
#else
              temp = plus_constant (temp, -xsize);
#endif /* not STACK_GROWS_DOWNWARD */
#endif /* 0 */
            }

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "memcpy"), 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "bcopy"), 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef FIRST_PARM_CALLER_OFFSET
      skip = 0;
#else
      skip = not_stack;
#endif

      if (GET_CODE (x) == CONST_DOUBLE && x != dconst0_rtx)
        x = force_const_double_mem (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          {
            rtx wd;
            rtx addr;
            /* Get the next word of the value in WD.  */
            if (GET_CODE (x) == MEM)
              {
                rtx addr = memory_address (SImode,
                                           plus_constant (XEXP (x, 0),
                                                          i * UNITS_PER_WORD));
                /* Copy to a reg, since machine may lack
                   memory-to-memory move insns.  */
                wd = copy_to_reg (gen_rtx (MEM, SImode, addr));
              }
            else if (GET_CODE (x) == REG)
              wd = gen_rtx (SUBREG, SImode, x, i);
            else if (x == dconst0_rtx)
              wd = const0_rtx;
            else
              abort ();

            emit_push_insn (wd,
                            SImode, 0, align, 0, 0, 0, args_addr,
                            gen_rtx (CONST_INT, VOIDmode,
                                     args_offset + (i - not_stack + skip) * UNITS_PER_WORD));
          }
    }
  else
    {
      rtx addr;
#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
}
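
#if 0
/* Illustrative sketch, not part of the original file: pushing one
   SImode word X for a call on a machine with real push insns
   (ARGS_ADDR == 0), no partial registers, no padding and no extra
   space, mirroring the word-at-a-time recursive call made above.  */
emit_push_insn (x, SImode, 0, align, 0, 0, 0, 0, const0_rtx);
#endif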
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.  */

void
emit_library_call (va_alist)
     va_dcl
{
  register va_list p;
  register int args_size = 0;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx *regvec;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int stack_padding = 0;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  regvec = (rtx *) alloca (nargs * sizeof (rtx));

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.  */
  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      argvec[count].value = val;

      /* Convert the arg value to the mode the library wants.
         Also make sure it is a reasonable operand
         for a move or push insn.  */
      /* ??? It is wrong to do it here; must do it earlier
         where we know the signedness of the arg.  */
      if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
        {
          val = gen_reg_rtx (mode);
          convert_move (val, argvec[count].value, 0);
        }
      else if (GET_CODE (val) != REG && GET_CODE (val) != MEM
               && ! ((CONSTANT_P (val) || GET_CODE (val) == CONST_DOUBLE)
                     && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, 0);

      argvec[count].value = val;
      argvec[count].mode = mode;
    }
  va_end (p);
  /* If we have no actual push instructions, make space for all the args
     right now.  */
#ifndef PUSH_ROUNDING
  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0);
  for (count = 0; count < nargs; count++)
    {
      register enum machine_mode mode = argvec[count].mode;
      register rtx reg;
      register int partial;

      reg = FUNCTION_ARG (args_so_far, mode, (tree)0, 1);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, (tree)0, 1);
#else
      partial = 0;
#endif
      if (reg == 0 || partial != 0)
        args_size += GET_MODE_SIZE (mode);
      if (partial != 0)
        args_size -= partial * GET_MODE_SIZE (SImode);
      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }

  if (args_size != 0)
    {
#ifdef STACK_ARGS_ADJUST
      struct args_size size;
      size.constant = args_size;
      size.var = 0;
      STACK_ARGS_ADJUST (size);
      args_size = size.constant;
#endif
      argblock
        = push_block (round_push (gen_rtx (CONST_INT, VOIDmode, args_size)));
    }
#endif /* no PUSH_ROUNDING */

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0);

#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif
  args_size = stack_padding;

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg;
      int partial;
      int arg_size;

      reg = FUNCTION_ARG (args_so_far, mode, (tree)0, 1);
      regvec[argnum] = reg;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, (tree)0, 1);
#else
      partial = 0;
#endif

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      else
        emit_push_insn (val, mode, 0, 0, partial, reg, 0, argblock,
                        gen_rtx (CONST_INT, VOIDmode, args_size));

      /* Compute size of stack space used by this argument.  */
      if (reg == 0 || partial != 0)
        arg_size = GET_MODE_SIZE (mode);
      else
        arg_size = 0;
      if (partial != 0)
        arg_size
          -= ((partial * UNITS_PER_WORD)
              / (PARM_BOUNDARY / BITS_PER_UNIT)
              * (PARM_BOUNDARY / BITS_PER_UNIT));

      args_size += arg_size;
      NO_DEFER_POP;
      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }

  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  fun = prepare_call_address (fun, 0);

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (regvec[count] != 0)
      emit_insn (gen_rtx (USE, VOIDmode, regvec[count]));

  use_insns = gen_sequence ();
  end_sequence ();

#ifdef STACK_BOUNDARY
  args_size = (args_size + STACK_BYTES - 1) / STACK_BYTES * STACK_BYTES;
#endif

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : 0,
               old_inhibit_defer_pop + 1, use_insns);
  OK_DEFER_POP;
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, 0, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || (TREE_CODE (to) == ARRAY_REF
          && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      register enum machine_mode mode1;
      int bitsize;
      int volstruct = 0;
      tree tem = to;
      int bitpos = 0;
      int unsignedp;

      if (TREE_CODE (to) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (to, 1);
          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) * DECL_SIZE_UNIT (field);
          mode1 = DECL_MODE (TREE_OPERAND (to, 1));
          unsignedp = TREE_UNSIGNED (field);
        }
      else
        {
          mode1 = TYPE_MODE (TREE_TYPE (to));
          bitsize = GET_MODE_BITSIZE (mode1);
          unsignedp = TREE_UNSIGNED (TREE_TYPE (to));
        }

      /* Compute cumulative bit-offset for nested component-refs
         and array-refs, and find the ultimate containing object.  */

      while (1)
        {
          if (TREE_CODE (tem) == COMPONENT_REF)
            {
              bitpos += DECL_OFFSET (TREE_OPERAND (tem, 1));
              if (TREE_THIS_VOLATILE (tem))
                volstruct = 1;
            }
          else if (TREE_CODE (tem) == ARRAY_REF
                   && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
                   && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
            {
              bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                         * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                         * TYPE_SIZE_UNIT (TREE_TYPE (tem)));
            }
          else
            break;
          tem = TREE_OPERAND (tem, 0);
        }
      /* TEM is now the containing data object.  */

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */
      if (mode1 == BImode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, 0, VOIDmode, 0);

      return store_field (to_rtx, bitsize, bitpos, mode1, from,
                          (want_value
                           /* Spurious cast makes HPUX compiler happy.  */
                           ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                           : VOIDmode),
                          unsignedp,
                          /* Required alignment of containing datum.  */
                          TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT);
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, 0, VOIDmode, 0);

  /* Compute FROM and store the value in the rtx we got.  */

  return store_expr (from, to_rtx, want_value);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   Returns TARGET or an equivalent value.
   TARGET may contain a QUEUED rtx.

   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.

   If the value stored is a constant, we return the constant.  */

rtx
store_expr (exp, target, suggest_reg)
     register tree exp;
     register rtx target;
     int suggest_reg;
{
  register rtx temp;
  int dont_return_target = 0;

  /* Copying a non-constant CONSTRUCTOR needs special treatment.  */

  if (TREE_CODE (exp) == CONSTRUCTOR && ! TREE_LITERAL (exp))
    {
      store_constructor (exp, target);
      return target;
    }

  if (suggest_reg && GET_CODE (target) == MEM && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, SUGGEST_REG will be nonzero for it.
       We know expand_expr will not use the target in that case.  */
    {
      temp = expand_expr (exp, cse_not_expected ? 0 : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, it is not safe
       to use as the returned value.  It would access the wrong
       place by the time the queued increment gets output.
       So copy the value through a temporary and use that temp
       as the result.  */
    {
      temp = expand_expr (exp, 0, GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
         expand_return relies on this.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && (CONSTANT_P (temp) || GET_CODE (temp) == CONST_DOUBLE))
        dont_return_target = 1;
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if nec.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
        emit_move_insn (target, temp);
    }
  if (dont_return_target)
    return temp;
  return target;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.  */

static void
store_constructor (exp, target)
     tree exp;
     rtx target;
{
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp);
      emit_move_insn (target, temp);
      return;
    }

  if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (exp))
          != list_length (TYPE_FIELDS (TREE_TYPE (exp))))
        clear_storage (target, int_size_in_bytes (TREE_TYPE (exp)));
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) * DECL_SIZE_UNIT (field);
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = DECL_OFFSET (field);

          store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
                       /* The alignment of TARGET is
                          at least what its type requires.  */
                       VOIDmode, 0,
                       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
        }
    }
  else if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (TREE_TYPE (exp));
      int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (TREE_TYPE (exp));

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1)
        clear_storage (target, maxelt - minelt + 1);
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          mode = TYPE_MODE (elttype);
          bitsize = GET_MODE_BITSIZE (mode);
          unsignedp = TREE_UNSIGNED (elttype);

          bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                    * TYPE_SIZE_UNIT (elttype));

          store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
                       /* The alignment of TARGET is
                          at least what its type requires.  */
                       VOIDmode, 0,
                       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
        }
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, align)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
{
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == BImode || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG)
    {
      store_bit_field (target, bitsize, bitpos,
                       mode,
                       expand_expr (exp, 0, VOIDmode, 0),
                       align);
      if (value_mode != VOIDmode)
        return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                  0, value_mode, 0, align);
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr))
        addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
                               plus_constant (addr,
                                              (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
2029 /* Given an rtx VALUE that may contain additions and multiplications,
2030 return an equivalent value that just refers to a register or memory.
2031 This is done by generating instructions to perform the arithmetic
2032 and returning a pseudo-register containing the value. */
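/* For example, with made-up rtx: given (plus:SI (reg:SI 100) (const_int 4)),
   force_operand emits an add insn and returns a pseudo register holding
   the sum; given a bare (reg:SI 100), which already qualifies as an
   operand, the rtx is returned unchanged.  */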
2035 force_operand (value, target)
2036 rtx value, target;
2038 register optab binoptab = 0;
2039 register rtx op2;
2040 /* Use subtarget as the target for operand 0 of a binary operation. */
2041 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2043 if (GET_CODE (value) == PLUS)
2044 binoptab = add_optab;
2045 else if (GET_CODE (value) == MINUS)
2046 binoptab = sub_optab;
2047 else if (GET_CODE (value) == MULT)
2049 op2 = XEXP (value, 1);
2050 if (!CONSTANT_P (op2)
2051 && !(GET_CODE (op2) == REG && op2 != subtarget))
2052 subtarget = 0;
2053 return expand_mult (GET_MODE (value),
2054 force_operand (XEXP (value, 0), subtarget),
2055 force_operand (op2, 0),
2056 target, 0);
2059 if (binoptab)
2061 op2 = XEXP (value, 1);
2062 if (!CONSTANT_P (op2)
2063 && !(GET_CODE (op2) == REG && op2 != subtarget))
2064 subtarget = 0;
2065 if (binoptab == sub_optab
2066 && GET_CODE (op2) == CONST_INT && INTVAL (op2) < 0)
2068 binoptab = add_optab;
2069 op2 = gen_rtx (CONST_INT, VOIDmode, - INTVAL (op2));
2071 return expand_binop (GET_MODE (value), binoptab,
2072 force_operand (XEXP (value, 0), subtarget),
2073 force_operand (op2, 0),
2074 target, 0, OPTAB_LIB_WIDEN);
2075 /* We give UNSIGNEDP = 0 to expand_binop
2076 because the only operations we are expanding here are signed ones. */
2078 return value;
2081 /* expand_expr: generate code for computing expression EXP.
2082 An rtx for the computed value is returned. The value is never null.
2083 In the case of a void EXP, const0_rtx is returned.
2085 The value may be stored in TARGET if TARGET is nonzero.
2086 TARGET is just a suggestion; callers must assume that
2087 the rtx returned may not be the same as TARGET.
2089 If TARGET is CONST0_RTX, it means that the value will be ignored.
2091 If TMODE is not VOIDmode, it suggests generating the
2092 result in mode TMODE. But this is done only when convenient.
2093 Otherwise, TMODE is ignored and the value is generated in its natural mode.
2094 TMODE is just a suggestion; callers must assume that
2095 the rtx returned may not have mode TMODE.
2097 If MODIFIER is EXPAND_SUM then when EXP is an addition
2098 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
2099 or a nest of (PLUS ...) and (MINUS ...) where the terms are
2100 products as above, or REG or MEM, or constant.
2101 Ordinarily in such cases we would output mul or add instructions
2102 and then return a pseudo reg containing the sum.
2104 If MODIFIER is EXPAND_CONST_ADDRESS then it is ok to return
2105 a MEM rtx whose address is a constant that isn't a legitimate address. */
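/* A hypothetical illustration of EXPAND_SUM (register numbers made up):
   for the C expression `p + i*4', expand_expr may return
       (plus:SI (mult:SI (reg:SI 101) (const_int 4)) (reg:SI 100))
   without emitting any add or mul insns, so that memory_address can
   later fold the whole sum into a single indexed address on machines
   that have one.  */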
2107 /* Subroutine of expand_expr:
2108 save the non-copied parts (LIST) of an expr (LHS), and return a list
2109 which can restore these values to their previous values,
2110 should something modify their storage. */
2111 static tree
2112 save_noncopied_parts (lhs, list)
2113 tree lhs;
2114 tree list;
2116 tree tail;
2117 tree parts = 0;
2119 for (tail = list; tail; tail = TREE_CHAIN (tail))
2120 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2121 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2122 else
2124 tree part = TREE_VALUE (tail);
2125 tree part_type = TREE_TYPE (part);
2126 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part, 0);
2127 rtx target = assign_stack_local (TYPE_MODE (part_type),
2128 int_size_in_bytes (part_type));
2129 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2130 target = change_address (target, TYPE_MODE (part_type), 0);
2131 parts = tree_cons (to_be_saved,
2132 build (RTL_EXPR, part_type, 0, (tree) target),
2133 parts);
2134 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2136 return parts;
2139 /* Subroutine of expand_expr:
2140 record the non-copied parts (LIST) of an expr (LHS),
2141 and return a list which specifies the initial values
2142 of these parts. */
2143 static tree
2144 init_noncopied_parts (lhs, list)
2145 tree lhs;
2146 tree list;
2148 tree tail;
2149 tree parts = 0;
2151 for (tail = list; tail; tail = TREE_CHAIN (tail))
2152 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2153 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2154 else
2156 tree part = TREE_VALUE (tail);
2157 tree part_type = TREE_TYPE (part);
2158 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part, 0);
2159 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2161 return parts;
2164 /* Subroutine of expand_expr:
2165 return the target to use when recursively expanding
2166 the first operand of an arithmetic operation. */
2168 static rtx
2169 validate_subtarget (subtarget, otherop)
2170 rtx subtarget;
2171 tree otherop;
2173 if (TREE_LITERAL (otherop))
2174 return subtarget;
2175 if (TREE_CODE (otherop) == VAR_DECL
2176 && DECL_RTL (otherop) != subtarget)
2177 return subtarget;
2178 return 0;
2181 static int
2182 fixed_type_p (exp)
2183 tree exp;
2185 if (TREE_CODE (exp) == PARM_DECL
2186 || TREE_CODE (exp) == VAR_DECL
2187 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == NEW_EXPR
2188 || TREE_CODE (exp) == COMPONENT_REF
2189 || TREE_CODE (exp) == ARRAY_REF)
2190 return 1;
2191 return 0;
2195 expand_expr (exp, target, tmode, modifier)
2196 register tree exp;
2197 rtx target;
2198 enum machine_mode tmode;
2199 enum expand_modifier modifier;
2201 extern rtx (*lang_expand_expr)();
2202 register rtx op0, op1, temp;
2203 tree type = TREE_TYPE (exp);
2204 register enum machine_mode mode = TYPE_MODE (type);
2205 register enum tree_code code = TREE_CODE (exp);
2206 optab this_optab;
2207 int negate_1;
2208 /* Use subtarget as the target for operand 0 of a binary operation. */
2209 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2210 rtx original_target = target;
2211 int ignore = target == const0_rtx;
2213 /* Don't use hard regs as subtargets, because the combiner
2214 can only handle pseudo regs. */
2215 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
2216 subtarget = 0;
2217 /* Avoid subtargets inside loops,
2218 since they hide some invariant expressions. */
2219 if (optimize && inside_loop ())
2220 subtarget = 0;
2222 if (ignore) target = 0, original_target = 0;
2224 /* If will do cse, generate all results into registers
2225 since 1) that allows cse to find more things
2226 and 2) otherwise cse could produce an insn the machine
2227 cannot support. */
2229 if (! cse_not_expected && mode != BLKmode)
2230 target = subtarget;
2232 /* No sense saving up arithmetic to be done
2233 if it's all in the wrong mode to form part of an address.
2234 And force_operand won't know whether to sign-extend or zero-extend. */
2236 if (mode != Pmode && modifier == EXPAND_SUM)
2237 modifier = EXPAND_NORMAL;
2239 switch (code)
2241 case PARM_DECL:
2242 if (DECL_RTL (exp) == 0)
2244 error_with_decl (exp, "prior parameter's size depends on `%s'");
2245 return const0_rtx;
2248 case FUNCTION_DECL:
2249 case VAR_DECL:
2250 case RESULT_DECL:
2251 if (DECL_RTL (exp) == 0)
2252 abort ();
2253 /* Must mark EXP used because it might be a compiler-generated
2254 variable used by a compiler-generated expression. */
2255 TREE_USED (exp) = 1;
2256 /* This is the case of an array whose size is to be determined
2257 from its initializer, while the initializer is still being parsed.
2258 See expand_decl. */
2259 if (GET_CODE (DECL_RTL (exp)) == MEM
2260 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
2261 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
2262 XEXP (DECL_RTL (exp), 0));
2263 if (GET_CODE (DECL_RTL (exp)) == MEM
2264 && modifier != EXPAND_CONST_ADDRESS)
2266 /* DECL_RTL probably contains a constant address.
2267 On RISC machines where a constant address isn't valid,
2268 make some insns to get that address into a register. */
2269 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
2270 || (flag_force_addr
2271 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
2272 return change_address (DECL_RTL (exp), VOIDmode,
2273 copy_rtx (XEXP (DECL_RTL (exp), 0)));
2275 return DECL_RTL (exp);
2277 case INTEGER_CST:
2278 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
2279 return gen_rtx (CONST_INT, VOIDmode, TREE_INT_CST_LOW (exp));
2280 /* Generate immediate CONST_DOUBLE
2281 which will be turned into memory by reload if necessary. */
2282 return immed_double_const (TREE_INT_CST_LOW (exp),
2283 TREE_INT_CST_HIGH (exp),
2284 mode);
2286 case CONST_DECL:
2287 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
2289 case REAL_CST:
2290 /* If optimized, generate immediate CONST_DOUBLE
2291 which will be turned into memory by reload if necessary. */
2292 if (!cse_not_expected)
2293 return immed_real_const (exp);
2294 case COMPLEX_CST:
2295 case STRING_CST:
2296 if (! TREE_CST_RTL (exp))
2297 output_constant_def (exp);
2299 /* TREE_CST_RTL probably contains a constant address.
2300 On RISC machines where a constant address isn't valid,
2301 make some insns to get that address into a register. */
2302 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
2303 && modifier != EXPAND_CONST_ADDRESS
2304 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
2305 return change_address (TREE_CST_RTL (exp), VOIDmode,
2306 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
2307 return TREE_CST_RTL (exp);
2309 case SAVE_EXPR:
2310 if (SAVE_EXPR_RTL (exp) == 0)
2312 rtx reg = gen_reg_rtx (mode);
2313 SAVE_EXPR_RTL (exp) = reg;
2314 store_expr (TREE_OPERAND (exp, 0), reg, 0);
2315 if (!optimize)
2316 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, reg,
2317 save_expr_regs);
2319 /* Don't let the same rtl node appear in two places. */
2320 return SAVE_EXPR_RTL (exp);
2322 case IF_STMT:
2323 emit_line_note (STMT_SOURCE_FILE (exp), STMT_SOURCE_LINE (exp));
2325 /* Exit flag is 0 for now. */
2326 expand_start_cond (STMT_COND (exp), 0);
2327 expand_expr_stmt (STMT_THEN (exp));
2328 if (STMT_ELSE (exp) == 0)
2329 expand_end_cond ();
2330 else
2332 expand_start_else ();
2333 expand_expr_stmt (STMT_ELSE (exp));
2334 expand_end_else ();
2336 return const0_rtx;
2338 case LOOP_STMT:
2340 tree vars = STMT_LOOP_VARS (exp);
2342 emit_line_note (STMT_SOURCE_FILE (exp), STMT_SOURCE_LINE (exp));
2344 while (vars)
2346 if (DECL_RTL (vars) == 0)
2347 expand_decl (vars);
2348 expand_decl_init (vars);
2349 vars = TREE_CHAIN (vars);
2352 if (TREE_READONLY (exp))
2354 /* Looks like a `while-do' loop. */
2355 expand_start_loop (1);
2356 expand_exit_loop_if_false (STMT_LOOP_COND (exp));
2357 expand_expr_stmt (STMT_LOOP_BODY (exp));
2358 expand_end_loop ();
2360 else
2362 /* Looks like a `do-while' loop. */
2363 expand_start_loop_continue_elsewhere (1);
2364 expand_expr_stmt (STMT_LOOP_BODY (exp));
2365 expand_loop_continue_here ();
2366 expand_exit_loop_if_false (STMT_LOOP_COND (exp));
2367 expand_end_loop ();
2370 if (obey_regdecls)
2371 for (vars = STMT_LOOP_VARS (exp); vars; vars = TREE_CHAIN (vars))
2372 use_variable (DECL_RTL (vars));
2374 return const0_rtx;
2376 case LET_STMT:
2378 tree vars = STMT_VARS (exp);
2380 TREE_USED (exp) = 1;
2382 /* If VARS have not yet been expanded, expand them now. */
2383 while (vars)
2385 if (DECL_RTL (vars) == 0)
2386 expand_decl (vars);
2387 /* Have to initialize these vars wherever this LET_STMT
2388 is expanded. */
2389 expand_decl_init (vars);
2390 vars = TREE_CHAIN (vars);
2393 temp = expand_expr (STMT_BODY (exp), target, tmode, modifier);
2395 if (obey_regdecls)
2396 for (vars = STMT_VARS (exp); vars; vars = TREE_CHAIN (vars))
2397 /* TYPE_DECLs do not have rtl. */
2398 if (DECL_RTL (vars))
2399 use_variable (DECL_RTL (vars));
2401 return temp;
2404 case RTL_EXPR:
2405 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
2406 abort ();
2407 emit_insns (RTL_EXPR_SEQUENCE (exp));
2408 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
2409 return RTL_EXPR_RTL (exp);
2411 case CONSTRUCTOR:
2412 /* All elts simple constants => refer to a constant in memory. */
2413 if (TREE_STATIC (exp))
2414 /* For aggregate types with non-BLKmode modes,
2415 this should ideally construct a CONST_INT. */
2417 rtx constructor = output_constant_def (exp);
2418 if (! memory_address_p (GET_MODE (constructor),
2419 XEXP (constructor, 0)))
2420 constructor = change_address (constructor, VOIDmode,
2421 XEXP (constructor, 0));
2422 return constructor;
2425 if (ignore)
2427 tree elt;
2428 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2429 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
2430 return const0_rtx;
2432 else
2434 if (target == 0)
2435 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2436 get_structure_value_addr (expr_size (exp)));
2437 store_expr (exp, target, 0);
2438 return target;
2441 case INDIRECT_REF:
2443 tree exp1 = TREE_OPERAND (exp, 0);
2444 tree exp2;
2446 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
2447 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
2448 This code has the same general effect as simply doing
2449 expand_expr on the save expr, except that the expression PTR
2450 is computed for use as a memory address. This means different
2451 code, suitable for indexing, may be generated. */
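/* Sketch with an assumed tree, not from this file: for `*p += 1' the
   parser produces roughly
       *SAVE_EXPR(p) = *SAVE_EXPR(p) + 1
   and the code below expands the saved pointer with EXPAND_SUM so the
   result can be used directly as a memory address.  */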
2452 if (TREE_CODE (exp1) == SAVE_EXPR
2453 && SAVE_EXPR_RTL (exp1) == 0
2454 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
2455 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
2456 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
2458 temp = expand_expr (TREE_OPERAND (exp1, 0), 0, VOIDmode, EXPAND_SUM);
2459 op0 = memory_address (mode, temp);
2460 op0 = copy_all_regs (op0);
2461 SAVE_EXPR_RTL (exp1) = op0;
2463 else
2465 if (modifier == EXPAND_INTO_STACK
2466 && original_target
2467 && GET_CODE (original_target) == MEM)
2468 op0 = expand_expr (exp1, XEXP (original_target, 0),
2469 VOIDmode, EXPAND_INTO_STACK);
2470 else
2471 op0 = expand_expr (exp1, 0, VOIDmode, EXPAND_SUM);
2472 op0 = memory_address (mode, op0);
2475 temp = gen_rtx (MEM, mode, op0);
2476 /* If address was computed by addition,
2477 mark this as an element of an aggregate. */
2478 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
2479 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
2480 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR))
2481 MEM_IN_STRUCT_P (temp) = 1;
2482 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
2483 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
2484 return temp;
2486 case ARRAY_REF:
2487 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
2488 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
2490 /* Nonconstant array index or nonconstant element size.
2491 Generate the tree for *(&array+index) and expand that,
2492 except do it in a language-independent way
2493 and don't complain about non-lvalue arrays.
2494 `mark_addressable' should already have been called
2495 for any array for which this case will be reached. */
2497 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
2498 TREE_OPERAND (exp, 0));
2499 tree index = TREE_OPERAND (exp, 1);
2500 tree elt;
2502 /* Convert the integer argument to a type the same size as a pointer
2503 so the multiply won't overflow spuriously. */
2504 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
2505 index = convert (type_for_size (POINTER_SIZE, 0), index);
2507 /* The array address isn't volatile even if the array is. */
2508 TREE_VOLATILE (array_adr) = 0;
2510 elt = build1 (INDIRECT_REF, type,
2511 fold (build (PLUS_EXPR, TYPE_POINTER_TO (type),
2512 array_adr,
2513 fold (build (MULT_EXPR,
2514 TYPE_POINTER_TO (type),
2515 index, size_in_bytes (type))))));
2517 return expand_expr (elt, target, tmode, modifier);
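/* For instance (hypothetical): for `a[i]' where `a' is an array of
   4-byte ints, the tree built above is equivalent to
       *(&a + i * 4)
   with I first converted to a pointer-sized integer, and it is that
   INDIRECT_REF which actually gets expanded.  */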
2520 /* Fold an expression like: "foo"[2].
2521 This is not done in fold so it won't happen inside &. */
2523 int i;
2524 tree arg0 = TREE_OPERAND (exp, 0);
2525 tree arg1 = TREE_OPERAND (exp, 1);
2527 if (TREE_CODE (arg0) == STRING_CST
2528 && TREE_CODE (arg1) == INTEGER_CST
2529 && !TREE_INT_CST_HIGH (arg1)
2530 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
2532 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
2534 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
2535 TREE_TYPE (exp) = integer_type_node;
2536 return expand_expr (exp, target, tmode, modifier);
2538 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
2540 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
2541 TREE_TYPE (exp) = integer_type_node;
2542 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
2547 /* If this is a constant index into a constant array,
2548 just get the value from the array. */
2549 if (TREE_READONLY (TREE_OPERAND (exp, 0))
2550 && ! TREE_VOLATILE (TREE_OPERAND (exp, 0))
2551 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
2552 && TREE_LITERAL (TREE_OPERAND (exp, 1))
2553 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
2554 && DECL_INITIAL (TREE_OPERAND (exp, 0))
2555 && TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0))) != ERROR_MARK)
2557 tree index = fold (TREE_OPERAND (exp, 1));
2558 if (TREE_CODE (index) == INTEGER_CST)
2560 int i = TREE_INT_CST_LOW (index);
2561 tree init = CONSTRUCTOR_ELTS (DECL_INITIAL (TREE_OPERAND (exp, 0)));
2563 while (init && i--)
2564 init = TREE_CHAIN (init);
2565 if (init)
2566 return expand_expr (fold (TREE_VALUE (init)), target, tmode, modifier);
2569 /* Treat array-ref with constant index as a component-ref. */
2571 case COMPONENT_REF:
2573 register enum machine_mode mode1;
2574 int volstruct = 0;
2575 int bitsize;
2576 tree tem = exp;
2577 int bitpos = 0;
2578 int unsignedp;
2580 if (TREE_CODE (exp) == COMPONENT_REF)
2582 tree field = TREE_OPERAND (exp, 1);
2583 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) * DECL_SIZE_UNIT (field);
2584 mode1 = DECL_MODE (field);
2585 unsignedp = TREE_UNSIGNED (field);
2587 else
2589 mode1 = TYPE_MODE (TREE_TYPE (exp));
2590 bitsize = GET_MODE_BITSIZE (mode1);
2591 unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2594 /* Compute cumulative bit-offset for nested component-refs
2595 and array-refs, and find the ultimate containing object. */
2597 while (1)
2599 if (TREE_CODE (tem) == COMPONENT_REF)
2601 bitpos += DECL_OFFSET (TREE_OPERAND (tem, 1));
2602 if (TREE_THIS_VOLATILE (tem))
2603 volstruct = 1;
2605 else if (TREE_CODE (tem) == ARRAY_REF
2606 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
2607 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
2609 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
2610 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
2611 * TYPE_SIZE_UNIT (TREE_TYPE (tem)));
2613 else
2614 break;
2615 tem = TREE_OPERAND (tem, 0);
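/* Example with assumed offsets: for `s.a.b[2]', where field `a' sits
   OFF_A bits into `s', field `b' sits OFF_B bits into `a', and the
   elements of `b' are 32 bits wide, the loop leaves TEM == `s' and
   bitpos == OFF_A + OFF_B + 2*32.  */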
2618 op0 = expand_expr (tem, 0, VOIDmode,
2619 (modifier == EXPAND_CONST_ADDRESS
2620 ? modifier : EXPAND_NORMAL));
2622 if (mode1 == BImode || GET_CODE (op0) == REG
2623 || GET_CODE (op0) == SUBREG)
2624 return extract_bit_field (op0, bitsize, bitpos, unsignedp,
2625 target, mode, tmode,
2626 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT);
2627 /* Get a reference to just this component. */
2628 if (modifier == EXPAND_CONST_ADDRESS)
2629 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
2630 (bitpos / BITS_PER_UNIT)));
2631 else
2632 op0 = change_address (op0, mode1,
2633 plus_constant (XEXP (op0, 0),
2634 (bitpos / BITS_PER_UNIT)));
2635 MEM_IN_STRUCT_P (op0) = 1;
2636 MEM_VOLATILE_P (op0) |= volstruct;
2637 /* If OP0 is in the shared structure-value stack slot,
2638 and it is not BLKmode, copy it into a register.
2639 The shared slot may be clobbered at any time by another call.
2640 BLKmode is safe because our caller will either copy the value away
2641 or take another component and come back here. */
2642 if (mode != BLKmode
2643 && TREE_CODE (TREE_OPERAND (exp, 0)) == CALL_EXPR
2644 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
2645 op0 = copy_to_reg (op0);
2646 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
2647 return op0;
2648 if (target == 0)
2649 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
2650 convert_move (target, op0, unsignedp);
2651 return target;
2654 /* Intended for a reference to a buffer of a file-object in Pascal.
2655 But it's not certain that a special tree code will really be
2656 necessary for these. INDIRECT_REF might work for them. */
2657 case BUFFER_REF:
2658 abort ();
2660 case WITH_CLEANUP_EXPR:
2661 if (RTL_EXPR_RTL (exp) == 0)
2663 RTL_EXPR_RTL (exp)
2664 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
2665 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2), cleanups_this_call);
2666 /* That's it for this cleanup. */
2667 TREE_OPERAND (exp, 2) = 0;
2669 return RTL_EXPR_RTL (exp);
2671 case OFFSET_REF:
2672 return expand_expr (resolve_offset_ref (exp), target, tmode, modifier);
2674 case CALL_EXPR:
2675 /* Check for a built-in function. */
2676 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2677 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
2678 && (DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
2679 != NOT_BUILT_IN))
2680 return expand_builtin (exp, target, subtarget, tmode, ignore);
2681 /* If this call was expanded already by preexpand_calls,
2682 just return the result we got. */
2683 if (CALL_EXPR_RTL (exp) != 0)
2684 return CALL_EXPR_RTL (exp);
2685 return expand_call (exp,
2686 (modifier == EXPAND_INTO_STACK) ? original_target : target,
2687 ignore, modifier);
2689 case NOP_EXPR:
2690 case CONVERT_EXPR:
2691 case REFERENCE_EXPR:
2692 if (TREE_CODE (type) == VOID_TYPE || ignore)
2694 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
2695 return const0_rtx;
2697 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
2698 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
2699 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, 0);
2700 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
2701 return op0;
2702 if (flag_force_mem && GET_CODE (op0) == MEM)
2703 op0 = copy_to_reg (op0);
2704 if (GET_MODE (op0) == VOIDmode)
2705 /* Avoid problem in convert_move due to unknown mode of OP0. */
2706 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
2707 op0);
2708 if (target == 0)
2709 target = gen_reg_rtx (mode);
2710 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
2711 return target;
2713 case PLUS_EXPR:
2714 preexpand_calls (exp);
2715 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
2716 && modifier == EXPAND_SUM)
2718 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, EXPAND_SUM);
2719 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
2720 return op1;
2722 negate_1 = 1;
2723 plus_minus:
2724 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2725 && modifier == EXPAND_SUM)
2727 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
2728 op0 = plus_constant (op0,
2729 negate_1 * TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
2730 return op0;
2732 this_optab = add_optab;
2733 if (modifier != EXPAND_SUM) goto binop;
2734 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
2735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
2736 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, EXPAND_SUM);
2737 /* Put a sum last, to simplify what follows. */
2738 #ifdef OLD_INDEXING
2739 if (GET_CODE (op1) == MULT)
2741 temp = op0;
2742 op0 = op1;
2743 op1 = temp;
2745 #endif
2746 #ifndef OLD_INDEXING
2747 /* Make sure any term that's a sum with a constant comes last. */
2748 if (GET_CODE (op0) == PLUS
2749 && CONSTANT_P (XEXP (op0, 1)))
2751 temp = op0;
2752 op0 = op1;
2753 op1 = temp;
2755 /* If adding to a sum including a constant,
2756 associate it to put the constant outside. */
2757 if (GET_CODE (op1) == PLUS
2758 && CONSTANT_P (XEXP (op1, 1)))
2760 rtx tem;
2761 int constant_term = 0;
2763 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
2764 /* Let's also eliminate constants from op0 if possible. */
2765 tem = eliminate_constant_term (op0, &constant_term);
2766 if (GET_CODE (XEXP (op1, 1)) == CONST_INT)
2768 if (constant_term != 0)
2769 return plus_constant (tem, INTVAL (XEXP (op1, 1)) + constant_term);
2770 else
2771 return plus_constant (op0, INTVAL (XEXP (op1, 1)));
2773 else
2774 return gen_rtx (PLUS, mode, op0, XEXP (op1, 1));
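/* The association above, shown with made-up rtx: OP0 = (reg 100) and
   OP1 = (plus (reg 101) (const_int 8)) yield
       (plus (plus (reg 101) (reg 100)) (const_int 8)),
   keeping the constant outermost where plus_constant can combine it.  */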
2776 #endif
2777 return gen_rtx (PLUS, mode, op0, op1);
2779 case MINUS_EXPR:
2780 preexpand_calls (exp);
2781 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2782 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= HOST_BITS_PER_INT)
2784 int negated;
2785 if (modifier == EXPAND_SUM)
2787 negate_1 = -1;
2788 goto plus_minus;
2790 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
2791 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
2792 negated = - TREE_INT_CST_LOW (TREE_OPERAND (exp, 1));
2793 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_INT)
2794 negated &= (1 << GET_MODE_BITSIZE (mode)) - 1;
2795 op1 = gen_rtx (CONST_INT, VOIDmode, negated);
2796 this_optab = add_optab;
2797 goto binop2;
2799 this_optab = sub_optab;
2800 goto binop;
2802 case MULT_EXPR:
2803 preexpand_calls (exp);
2804 /* If first operand is constant, swap them.
2805 Thus the following special case checks need only
2806 check the second operand. */
2807 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
2809 register tree t1 = TREE_OPERAND (exp, 0);
2810 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
2811 TREE_OPERAND (exp, 1) = t1;
2814 /* Attempt to return something suitable for generating an
2815 indexed address, for machines that support that. */
2817 if (modifier == EXPAND_SUM
2818 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
2820 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
2822 /* Apply distributive law if OP0 is x+c. */
2823 if (GET_CODE (op0) == PLUS
2824 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
2825 return gen_rtx (PLUS, mode,
2826 gen_rtx (MULT, mode, XEXP (op0, 0),
2827 gen_rtx (CONST_INT, VOIDmode,
2828 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
2829 gen_rtx (CONST_INT, VOIDmode,
2830 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2831 * INTVAL (XEXP (op0, 1)))));
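/* E.g. (hypothetical): expanding (x + 3) * 4, with OP0 coming back as
   (plus (reg 100) (const_int 3)), returns
       (plus (mult (reg 100) (const_int 4)) (const_int 12)).  */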
2833 if (GET_CODE (op0) != REG)
2834 op0 = force_operand (op0, 0);
2835 if (GET_CODE (op0) != REG)
2836 op0 = copy_to_mode_reg (mode, op0);
2838 return gen_rtx (MULT, mode, op0,
2839 gen_rtx (CONST_INT, VOIDmode,
2840 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
2842 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
2843 /* Check for multiplying things that have been extended
2844 from a narrower type. If this machine supports multiplying
2845 in that narrower type with a result in the desired type,
2846 do it that way, and avoid the explicit type-conversion. */
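/* For example, assuming 16-bit shorts and 32-bit ints: in
       (int) (short) a * (int) (short) b
   both operands were extended from HImode, so if the machine can
   multiply HImode by HImode giving SImode (smul_widen_optab or
   umul_widen_optab), that insn is used directly.  */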
2847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
2848 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE
2849 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
2850 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
2851 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2852 && int_fits_type_p (TREE_OPERAND (exp, 1),
2853 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
2854 /* Don't use a widening multiply if a shift will do. */
2855 && exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0)
2857 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
2858 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
2860 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
2861 /* If both operands are extended, they must either both
2862 be zero-extended or both be sign-extended. */
2863 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
2865 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
2867 enum machine_mode innermode
2868 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
2869 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
2870 ? umul_widen_optab : smul_widen_optab);
2871 if (mode == GET_MODE_WIDER_MODE (innermode)
2872 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2874 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
2875 0, VOIDmode, 0);
2876 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
2877 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
2878 else
2879 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
2880 0, VOIDmode, 0);
2881 goto binop2;
2884 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
2885 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
2886 return expand_mult (mode, op0, op1, target, TREE_UNSIGNED (type));
2888 case TRUNC_DIV_EXPR:
2889 case FLOOR_DIV_EXPR:
2890 case CEIL_DIV_EXPR:
2891 case ROUND_DIV_EXPR:
2892 case EXACT_DIV_EXPR:
2893 preexpand_calls (exp);
2894 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
2895 /* Possible optimization: compute the dividend with EXPAND_SUM
2896 then if the divisor is constant can optimize the case
2897 where some terms of the dividend have coeffs divisible by it. */
2898 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
2899 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
2900 return expand_divmod (0, code, mode, op0, op1, target,
2901 TREE_UNSIGNED (type));
2903 case RDIV_EXPR:
2904 preexpand_calls (exp);
2905 this_optab = flodiv_optab;
2906 goto binop;
2908 case TRUNC_MOD_EXPR:
2909 case FLOOR_MOD_EXPR:
2910 case CEIL_MOD_EXPR:
2911 case ROUND_MOD_EXPR:
2912 preexpand_calls (exp);
2913 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
2914 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
2915 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
2916 return expand_divmod (1, code, mode, op0, op1, target,
2917 TREE_UNSIGNED (type));
2918 #if 0
2919 #ifdef HAVE_divmoddisi4
2920 if (GET_MODE (op0) != DImode)
2922 temp = gen_reg_rtx (DImode);
2923 convert_move (temp, op0, 0);
2924 op0 = temp;
2925 if (GET_MODE (op1) != SImode && GET_CODE (op1) != CONST_INT)
2927 temp = gen_reg_rtx (SImode);
2928 convert_move (temp, op1, 0);
2929 op1 = temp;
2931 temp = gen_reg_rtx (SImode);
2932 if (target == 0)
2933 target = gen_reg_rtx (SImode);
2934 emit_insn (gen_divmoddisi4 (temp, protect_from_queue (op0, 0),
2935 protect_from_queue (op1, 0),
2936 protect_from_queue (target, 1)));
2937 return target;
2939 #endif
2940 #endif
2942 case FIX_ROUND_EXPR:
2943 case FIX_FLOOR_EXPR:
2944 case FIX_CEIL_EXPR:
2945 abort (); /* Not used for C. */
2947 case FIX_TRUNC_EXPR:
2948 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
2949 if (target == 0)
2950 target = gen_reg_rtx (mode);
2952 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2953 if (mode == HImode || mode == QImode)
2955 register rtx temp = gen_reg_rtx (SImode);
2956 expand_fix (temp, op0, 0);
2957 convert_move (target, temp, 0);
2959 else
2960 expand_fix (target, op0, unsignedp);
2962 return target;
2964 case FLOAT_EXPR:
2965 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
2966 if (target == 0)
2967 target = gen_reg_rtx (mode);
2968 if (GET_MODE (op0) == VOIDmode)
2969 /* Avoid problem in convert_move due to unknown mode of OP0. */
2970 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
2971 op0);
2973 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
2974 if (GET_MODE (op0) == HImode
2975 || GET_MODE (op0) == QImode)
2977 register rtx temp = gen_reg_rtx (SImode);
2978 convert_move (temp, op0, unsignedp);
2979 expand_float (target, temp, 0);
2981 else
2982 expand_float (target, op0, unsignedp);
2984 return target;
2986 case NEGATE_EXPR:
2987 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
2988 temp = expand_unop (mode, neg_optab, op0, target, 0);
2989 if (temp == 0)
2990 abort ();
2991 return temp;
2993 case ABS_EXPR:
2994 /* First try to do it with a special abs instruction.
2995 If that does not win, use conditional jump and negate. */
2996 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
2997 temp = expand_unop (mode, abs_optab, op0, target, 0);
2998 if (temp != 0)
2999 return temp;
3000 temp = gen_label_rtx ();
3001 if (target == 0 || GET_CODE (target) != REG)
3002 target = gen_reg_rtx (mode);
3003 emit_move_insn (target, op0);
3004 emit_cmp_insn (target,
3005 expand_expr (convert (TREE_TYPE (exp), integer_zero_node),
3006 0, VOIDmode, 0),
3007 0, 0, 0);
3008 NO_DEFER_POP;
3009 emit_jump_insn (gen_bge (temp));
3010 op0 = expand_unop (mode, neg_optab, target, target, 0);
3011 if (op0 != target)
3012 emit_move_insn (target, op0);
3013 emit_label (temp);
3014 OK_DEFER_POP;
3015 return target;
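/* In effect the fallback above emits this sequence (sketch only):
       target = op0;
       if (target >= 0) goto L;
       target = -target;
     L: ...
   using emit_cmp_insn and gen_bge for the conditional jump.  */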
3017 case MAX_EXPR:
3018 case MIN_EXPR:
3019 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3020 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3021 if (target == 0 || GET_CODE (target) != REG || target == op1)
3022 target = gen_reg_rtx (mode);
3023 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3024 if (target != op0)
3025 emit_move_insn (target, op0);
3026 op0 = gen_label_rtx ();
3027 if (code == MAX_EXPR)
3028 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3029 ? compare1 (target, op1, GEU, LEU, 1, mode)
3030 : compare1 (target, op1, GE, LE, 0, mode));
3031 else
3032 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3033 ? compare1 (target, op1, LEU, GEU, 1, mode)
3034 : compare1 (target, op1, LE, GE, 0, mode));
3035 if (temp == const0_rtx)
3036 emit_move_insn (target, op1);
3037 else if (temp != const1_rtx)
3039 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
3040 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
3041 else
3042 abort ();
3043 emit_move_insn (target, op1);
3045 emit_label (op0);
3046 return target;
3048 /* ??? Can optimize when the operand of this is a bitwise operation,
3049 by using a different bitwise operation. */
3050 case BIT_NOT_EXPR:
3051 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3052 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
3053 if (temp == 0)
3054 abort ();
3055 return temp;
3057 case FFS_EXPR:
3058 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3059 temp = expand_unop (mode, ffs_optab, op0, target, 1);
3060 if (temp == 0)
3061 abort ();
3062 return temp;
3064 /* ??? Can optimize bitwise operations with one arg constant.
3065 Pastel optimizes (a bitwise1 n) bitwise2 (a bitwise3 b)
3066 and (a bitwise1 b) bitwise2 b (etc)
3067 but that is probably not worthwhile. */
3069 /* BIT_AND_EXPR is for bitwise anding.
3070 TRUTH_AND_EXPR is for anding two boolean values
3071 when we want in all cases to compute both of them.
3072 In general it is fastest to do TRUTH_AND_EXPR by
3073 computing both operands as actual zero-or-1 values
3074 and then bitwise anding. In cases where there cannot
3075 be any side effects, better code would be made by
3076 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
3077 but the question is how to recognize those cases. */
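/* Illustration (the choice of tree code is up to the front end): for
   `f() && g()' a TRUTH_ANDIF_EXPR short-circuits and may skip g();
   a TRUTH_AND_EXPR instead evaluates both operands to 0-or-1 values
   and ands them bitwise, trading a conditional jump for straight-line
   code.  */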
3079 case TRUTH_AND_EXPR:
3080 case BIT_AND_EXPR:
3081 preexpand_calls (exp);
3082 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
3083 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3084 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3085 return expand_bit_and (mode, op0, op1, target);
3087 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
3088 case TRUTH_OR_EXPR:
3089 case BIT_IOR_EXPR:
3090 preexpand_calls (exp);
3091 this_optab = ior_optab;
3092 goto binop;
3094 case BIT_XOR_EXPR:
3095 preexpand_calls (exp);
3096 this_optab = xor_optab;
3097 goto binop;
3099 case LSHIFT_EXPR:
3100 case RSHIFT_EXPR:
3101 case LROTATE_EXPR:
3102 case RROTATE_EXPR:
3103 preexpand_calls (exp);
3104 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
3105 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3106 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
3107 TREE_UNSIGNED (type));
3109 /* ??? cv's were formerly used here to combine additive constants
3110 and to determine the answer when only additive constants differ.
3111 Also, the addition of one can be handled by changing the condition. */
3112 case LT_EXPR:
3113 case LE_EXPR:
3114 case GT_EXPR:
3115 case GE_EXPR:
3116 case EQ_EXPR:
3117 case NE_EXPR:
3118 preexpand_calls (exp);
3119 temp = do_store_flag (exp, target, mode);
3120 if (temp != 0)
3121 return temp;
3122 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
3123 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
3124 && subtarget
3125 && (GET_MODE (subtarget)
3126 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3128 temp = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3129 if (temp != subtarget)
3130 temp = copy_to_reg (temp);
3131 op1 = gen_label_rtx ();
3132 emit_cmp_insn (temp, const0_rtx, 0, TREE_UNSIGNED (type), 0);
3133 emit_jump_insn (gen_beq (op1));
3134 emit_move_insn (temp, const1_rtx);
3135 emit_label (op1);
3136 return temp;
3138 /* If no set-flag instruction, must generate a conditional
3139 store into a temporary variable. Drop through
3140 and handle this like && and ||. */
3142 case TRUTH_ANDIF_EXPR:
3143 case TRUTH_ORIF_EXPR:
3144 temp = gen_reg_rtx (mode);
3145 emit_clr_insn (temp);
3146 op1 = gen_label_rtx ();
3147 jumpifnot (exp, op1);
3148 emit_0_to_1_insn (temp);
3149 emit_label (op1);
3150 return temp;
3152 case TRUTH_NOT_EXPR:
3153 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3154 /* The parser is careful to generate TRUTH_NOT_EXPR
3155 only with operands that are always zero or one. */
3156 temp = expand_binop (mode, xor_optab, op0,
3157 gen_rtx (CONST_INT, mode, 1),
3158 target, 1, OPTAB_LIB_WIDEN);
3159 if (temp == 0)
3160 abort ();
3161 return temp;
3163 case COMPOUND_EXPR:
3164 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3165 emit_queue ();
3166 return expand_expr (TREE_OPERAND (exp, 1), target, VOIDmode, 0);
3168 case COND_EXPR:
3170 /* Note that COND_EXPRs whose type is a structure or union
3171 are required to be constructed to contain assignments to
3172 a temporary variable, so that we can evaluate them here
3173 for side effect only. If type is void, we must do likewise. */
3175 /* If an arm of the branch requires a cleanup,
3176 only that cleanup is performed. */
3178 tree old_cleanups = cleanups_this_call;
3179 cleanups_this_call = 0;
3181 op0 = gen_label_rtx ();
3182 op1 = gen_label_rtx ();
3184 if (mode == VOIDmode || ignore)
3185 temp = 0;
3186 else if (target)
3187 temp = target;
3188 else if (mode == BLKmode)
3190 if (TYPE_SIZE (type) == 0 || ! TREE_LITERAL (TYPE_SIZE (type)))
3191 abort ();
3192 temp = assign_stack_local (BLKmode,
3193 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3194 * TYPE_SIZE_UNIT (type)
3195 + BITS_PER_UNIT - 1)
3196 / BITS_PER_UNIT);
3198 else
3199 temp = gen_reg_rtx (mode);
3201 jumpifnot (TREE_OPERAND (exp, 0), op0);
3202 NO_DEFER_POP;
3203 if (temp != 0)
3204 store_expr (TREE_OPERAND (exp, 1), temp, 0);
3205 else
3206 expand_expr (TREE_OPERAND (exp, 1), ignore ? const0_rtx : 0,
3207 VOIDmode, 0);
3208 if (cleanups_this_call)
3210 sorry ("aggreage value in COND_EXPR");
3211 cleanups_this_call = 0;
3214 emit_queue ();
3215 emit_jump_insn (gen_jump (op1));
3216 emit_barrier ();
3217 emit_label (op0);
3218 if (temp != 0)
3219 store_expr (TREE_OPERAND (exp, 2), temp, 0);
3220 else
3221 expand_expr (TREE_OPERAND (exp, 2), ignore ? const0_rtx : 0,
3222 VOIDmode, 0);
3223 if (cleanups_this_call)
3225 sorry ("aggreage value in COND_EXPR");
3226 cleanups_this_call = 0;
3229 emit_queue ();
3230 emit_label (op1);
3231 OK_DEFER_POP;
3232 cleanups_this_call = old_cleanups;
3233 return temp;
3236 case NEW_EXPR:
3238 /* Something needs to be initialized, but we didn't know
3239 where that thing was when building the tree. For example,
3240 it could be the return value of a function, or a parameter
3241 to a function which is laid down in the stack, or a temporary
3242 variable which must be passed by reference. */
3244 tree slot = TREE_OPERAND (exp, 0);
3246 if (TREE_CODE (slot) != VAR_DECL)
3247 abort ();
3249 if (target == 0)
3251 target = assign_stack_local (TYPE_MODE (type),
3252 int_size_in_bytes (type));
3254 DECL_RTL (slot) = target;
3256 /* Since SLOT is not known to the called function
3257 to belong to its stack frame, we must build an explicit
3258 cleanup. This case occurs when we must build up a reference
3259 to pass the reference as an argument. In this case,
3260 it is very likely that such a reference need not be
3261 built here. */
3263 if (TREE_OPERAND (exp, 2) == 0)
3264 TREE_OPERAND (exp, 2) = (tree)maybe_build_cleanup (slot);
3265 if (TREE_OPERAND (exp, 2))
3266 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2),
3267 cleanups_this_call);
3269 else
3271 /* This case does occur when expanding a parameter which
3272 needs to be constructed on the stack. The target
3273 is the actual stack address that we want to initialize.
3275 The function we call will perform the cleanup in this case. */
3276 DECL_RTL (slot) = target;
3279 /* Run the initialization expression now with the target. */
3280 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3283 case INIT_EXPR:
3285 tree lhs = TREE_OPERAND (exp, 0);
3286 tree rhs = TREE_OPERAND (exp, 1);
3287 tree noncopied_parts = 0;
3288 tree type = TREE_TYPE (lhs);
3290 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
3291 if (TYPE_NONCOPIED_PARTS (type) != 0 && !fixed_type_p (rhs))
3292 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
3293 TYPE_NONCOPIED_PARTS (type));
3294 while (noncopied_parts != 0)
3296 expand_assignment (TREE_VALUE (noncopied_parts),
3297 TREE_PURPOSE (noncopied_parts), 0, 0);
3298 noncopied_parts = TREE_CHAIN (noncopied_parts);
3300 return temp;
3303 case MODIFY_EXPR:
3305 /* If lhs is complex, expand calls in rhs before computing it.
3306 That's so we don't compute a pointer and save it over a call.
3307 If lhs is simple, compute it first so we can give it as a
3308 target if the rhs is just a call. This avoids an extra temp and copy
3309 and that prevents a partial-subsumption which makes bad code.
3310 Actually we could treat component_ref's of vars like vars. */
3311 tree lhs = TREE_OPERAND (exp, 0);
3312 tree rhs = TREE_OPERAND (exp, 1);
3313 tree noncopied_parts = 0;
3314 tree type = TREE_TYPE (lhs);
3315 temp = 0;
3317 if (TREE_CODE (lhs) != VAR_DECL
3318 && TREE_CODE (lhs) != RESULT_DECL
3319 && TREE_CODE (lhs) != PARM_DECL)
3320 preexpand_calls (exp);
3322 /* Check for |= or &= of a bitfield of size 1 into another bitfield
3323 of size 1. In this case, (unless we need the result of the
3324 assignment) we can do this more efficiently with a
3325 test followed by an assignment, if necessary. */
3326 if (ignore
3327 && TREE_CODE (lhs) == COMPONENT_REF
3328 && (TREE_CODE (rhs) == BIT_IOR_EXPR
3329 || TREE_CODE (rhs) == BIT_AND_EXPR)
3330 && TREE_OPERAND (rhs, 0) == lhs
3331 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
3332 && (TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1)))
3333 * DECL_SIZE_UNIT (TREE_OPERAND (lhs, 1))) == 1
3334 && (TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))
3335 * DECL_SIZE_UNIT (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
3337 rtx label = gen_label_rtx ();
3339 do_jump (TREE_OPERAND (rhs, 1),
3340 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
3341 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
3342 expand_assignment (lhs, convert (TREE_TYPE (rhs),
3343 (TREE_CODE (rhs) == BIT_IOR_EXPR
3344 ? integer_one_node
3345 : integer_zero_node)),
3346 0, 0);
3347 emit_label (label);
3348 return const0_rtx;
3351 if (TYPE_NONCOPIED_PARTS (type) != 0
3352 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
3353 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
3354 TYPE_NONCOPIED_PARTS (type));
3356 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
3357 while (noncopied_parts != 0)
3359 expand_assignment (TREE_PURPOSE (noncopied_parts),
3360 TREE_VALUE (noncopied_parts), 0, 0);
3361 noncopied_parts = TREE_CHAIN (noncopied_parts);
3363 return temp;
3366 case PREINCREMENT_EXPR:
3367 case PREDECREMENT_EXPR:
3368 return expand_increment (exp, 0);
3370 case POSTINCREMENT_EXPR:
3371 case POSTDECREMENT_EXPR:
3372 return expand_increment (exp, !ignore);
3374 case ADDR_EXPR:
3375 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode,
3376 modifier != EXPAND_INTO_STACK ? EXPAND_CONST_ADDRESS : EXPAND_INTO_STACK);
3377 if (GET_CODE (op0) != MEM)
3378 abort ();
3379 if (modifier == EXPAND_SUM)
3380 return XEXP (op0, 0);
3381 op0 = force_operand (XEXP (op0, 0), target);
3382 if (flag_force_addr && GET_CODE (op0) != REG)
3383 return force_reg (Pmode, op0);
3384 return op0;
3386 case ENTRY_VALUE_EXPR:
3387 abort ();
3389 case ERROR_MARK:
3390 return const0_rtx;
3392 default:
3393 return (*lang_expand_expr) (exp, target, tmode, modifier);
3396 /* Here to do an ordinary binary operator, generating an instruction
3397 from the optab already placed in `this_optab'. */
3398 binop:
3399 /* Detect things like x = y | (a == b)
3400 and do them as (x = y), (a == b ? x |= 1 : 0), x. */
3401 /* First, get the comparison or conditional into the second arg. */
3402 if (comparison_code[(int) TREE_CODE (TREE_OPERAND (exp, 0))]
3403 || (TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR
3404 && (integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
3405 || integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 2)))))
3407 if (this_optab == ior_optab || this_optab == add_optab
3408 || this_optab == xor_optab)
3410 tree exch = TREE_OPERAND (exp, 1);
3411 TREE_OPERAND (exp, 1) = TREE_OPERAND (exp, 0);
3412 TREE_OPERAND (exp, 0) = exch;
3415 /* Optimize X + (Y ? Z : 0) by computing X and maybe adding Z. */
3416 if (comparison_code[(int) TREE_CODE (TREE_OPERAND (exp, 1))]
3417 || (TREE_CODE (TREE_OPERAND (exp, 1)) == COND_EXPR
3418 && (integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 1), 1))
3419 || integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 1), 2)))))
3421 if (this_optab == ior_optab || this_optab == add_optab
3422 || this_optab == xor_optab || this_optab == sub_optab
3423 || this_optab == lshl_optab || this_optab == ashl_optab
3424 || this_optab == lshr_optab || this_optab == ashr_optab
3425 || this_optab == rotl_optab || this_optab == rotr_optab)
3427 tree thenexp;
3428 rtx thenv = 0;
3430 /* TARGET gets a reg in which we can perform the computation.
3431 Use the specified target if it's a pseudo reg and safe. */
3432 target = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
3433 if (target == 0) target = gen_reg_rtx (mode);
3435 /* Compute X into the target. */
3436 store_expr (TREE_OPERAND (exp, 0), target, 0);
3437 op0 = gen_label_rtx ();
3439 /* If other operand is a comparison COMP, treat it as COMP ? 1 : 0 */
3440 if (TREE_CODE (TREE_OPERAND (exp, 1)) != COND_EXPR)
3442 do_jump (TREE_OPERAND (exp, 1), op0, 0);
3443 thenv = const1_rtx;
3445 else if (integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 1), 2)))
3447 do_jump (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), op0, 0);
3448 thenexp = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
3450 else
3452 do_jump (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0, op0);
3453 thenexp = TREE_OPERAND (TREE_OPERAND (exp, 1), 2);
3456 if (thenv == 0)
3457 thenv = expand_expr (thenexp, 0, VOIDmode, 0);
3459 /* THENV is now Z, the value to operate on, as an rtx.
3460 We have already tested that Y isn't zero, so do the operation. */
3462 if (this_optab == rotl_optab || this_optab == rotr_optab)
3463 temp = expand_binop (mode, this_optab, target, thenv, target,
3464 -1, OPTAB_LIB);
3465 else if (this_optab == lshl_optab || this_optab == lshr_optab)
3466 temp = expand_binop (mode, this_optab, target, thenv, target,
3467 1, OPTAB_LIB_WIDEN);
3468 else
3469 temp = expand_binop (mode, this_optab, target, thenv, target,
3470 0, OPTAB_LIB_WIDEN);
3471 if (target != temp)
3472 emit_move_insn (target, temp);
3474 emit_queue ();
3475 do_pending_stack_adjust ();
3476 emit_label (op0);
3477 return target;
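/* Sketch of the code generated for `x | (a == b)' (label made up):
       target = x;
       if (!(a == b)) goto L;
       target |= 1;
     L: ...
   do_jump arranges the branch so the OR is skipped when the comparison
   is false.  */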
3480 subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
3481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3482 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3483 binop2:
3484 temp = expand_binop (mode, this_optab, op0, op1, target,
3485 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
3486 if (temp == 0)
3487 abort ();
3488 return temp;
3491 /* Expand an expression EXP that calls a built-in function,
3492 with result going to TARGET if that's convenient
3493 (and in mode MODE if that's convenient).
3494 SUBTARGET may be used as the target for computing one of EXP's operands.
3495 IGNORE is nonzero if the value is to be ignored. */
3497 static rtx
3498 expand_builtin (exp, target, subtarget, mode, ignore)
3499 tree exp;
3500 rtx target;
3501 rtx subtarget;
3502 enum machine_mode mode;
3503 int ignore;
3505 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3506 tree arglist = TREE_OPERAND (exp, 1);
3507 rtx op0;
3509 switch (DECL_FUNCTION_CODE (fndecl))
3511 case BUILT_IN_ABS:
3512 case BUILT_IN_LABS:
3513 case BUILT_IN_FABS:
3514 /* build_function_call changes these into ABS_EXPR. */
3515 abort ();
3517 case BUILT_IN_SAVEREGS:
3519 /* When this function is called, it means that registers must be
3520 saved on entry to this function. So we migrate the
3521 call to the first insn of this function. */
3522 rtx last = get_last_insn ();
3523 rtx temp, saving_insns, last_saving_insn;
3525 /* If there are parameters which must be saved from this function,
3526 save them now. */
3527 saving_insns = save_from_saveregs;
3528 emit_insns (saving_insns);
3529 if (saving_insns)
3531 emit_note (0, -1);
3532 last_saving_insn = get_last_insn ();
3535 /* Now really call the function. `expand_call' does not call
3536 expand_builtin, so there is no danger of infinite recursion here. */
3537 temp = expand_call (exp, target, ignore);
3539 if (saving_insns)
3541 extern void use_variable ();
3544 if (GET_CODE (saving_insns) == INSN)
3545 note_stores (PATTERN (saving_insns), use_variable);
3546 saving_insns = NEXT_INSN (saving_insns);
3547 } while (saving_insns != last_saving_insn);
3549 reorder_insns (NEXT_INSN (last), get_last_insn (), get_insns ());
3550 return temp;
3553 case BUILT_IN_CLASSIFY_TYPE:
3554 if (arglist != 0)
3556 tree type = TREE_TYPE (TREE_VALUE (arglist));
3557 enum tree_code code = TREE_CODE (type);
3558 if (code == VOID_TYPE)
3559 return gen_rtx (CONST_INT, VOIDmode, void_type_class);
3560 if (code == INTEGER_TYPE)
3561 return gen_rtx (CONST_INT, VOIDmode, integer_type_class);
3562 if (code == CHAR_TYPE)
3563 return gen_rtx (CONST_INT, VOIDmode, char_type_class);
3564 if (code == ENUMERAL_TYPE)
3565 return gen_rtx (CONST_INT, VOIDmode, enumeral_type_class);
3566 if (code == BOOLEAN_TYPE)
3567 return gen_rtx (CONST_INT, VOIDmode, boolean_type_class);
3568 if (code == POINTER_TYPE)
3569 return gen_rtx (CONST_INT, VOIDmode, pointer_type_class);
3570 if (code == REFERENCE_TYPE)
3571 return gen_rtx (CONST_INT, VOIDmode, reference_type_class);
3572 if (code == OFFSET_TYPE)
3573 return gen_rtx (CONST_INT, VOIDmode, offset_type_class);
3574 if (code == REAL_TYPE)
3575 return gen_rtx (CONST_INT, VOIDmode, real_type_class);
3576 if (code == COMPLEX_TYPE)
3577 return gen_rtx (CONST_INT, VOIDmode, complex_type_class);
3578 if (code == FUNCTION_TYPE)
3579 return gen_rtx (CONST_INT, VOIDmode, function_type_class);
3580 if (code == METHOD_TYPE)
3581 return gen_rtx (CONST_INT, VOIDmode, method_type_class);
3582 if (code == RECORD_TYPE)
3583 return gen_rtx (CONST_INT, VOIDmode, record_type_class);
3584 if (code == UNION_TYPE)
3585 return gen_rtx (CONST_INT, VOIDmode, union_type_class);
3586 if (code == ARRAY_TYPE)
3587 return gen_rtx (CONST_INT, VOIDmode, array_type_class);
3588 if (code == STRING_TYPE)
3589 return gen_rtx (CONST_INT, VOIDmode, string_type_class);
3590 if (code == SET_TYPE)
3591 return gen_rtx (CONST_INT, VOIDmode, set_type_class);
3592 if (code == FILE_TYPE)
3593 return gen_rtx (CONST_INT, VOIDmode, file_type_class);
3594 if (code == LANG_TYPE)
3595 return gen_rtx (CONST_INT, VOIDmode, lang_type_class);
3597 return gen_rtx (CONST_INT, VOIDmode, no_type_class);
3599 case BUILT_IN_ALLOCA:
3600 if (arglist == 0
3601 /* Arg could be non-integer if user redeclared this fcn wrong. */
3602 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
3603 return const0_rtx;
3604 frame_pointer_needed = 1;
3605 current_function_calls_alloca = 1;
3606 /* Compute the argument. */
3607 op0 = expand_expr (TREE_VALUE (arglist), 0, VOIDmode, 0);
3608 if (! CONSTANT_P (op0))
3610 op0 = force_reg (GET_MODE (op0), op0);
3611 if (GET_MODE (op0) != Pmode)
3612 op0 = convert_to_mode (Pmode, op0, 1);
3614 /* Push that much space (rounding it up). */
3615 do_pending_stack_adjust ();
3617 #ifdef STACK_POINTER_OFFSET
3618 /* If we will have to round the result down (which is up
3619 if stack grows down), make sure we have extra space so the
3620 user still gets at least as much space as he asked for. */
3621 if ((STACK_POINTER_OFFSET + STACK_BYTES - 1) / STACK_BYTES
3622 != STACK_POINTER_OFFSET / STACK_BYTES)
3623 op0 = plus_constant (op0, STACK_BYTES);
3624 #endif
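/* Worked example, assuming STACK_BYTES == 8 and STACK_POINTER_OFFSET == 4:
   (4 + 8 - 1) / 8 == 1 but 4 / 8 == 0, so rounding the result could eat
   into the request, and STACK_BYTES extra bytes are added to OP0.  */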
3626 #ifdef STACK_GROWS_DOWNWARD
3627 anti_adjust_stack (round_push (op0));
3628 #endif
3629 /* Return a copy of current stack ptr, in TARGET if possible. */
3630 if (target)
3631 emit_move_insn (target, stack_pointer_rtx);
3632 else
3633 target = copy_to_reg (stack_pointer_rtx);
3634 #ifdef STACK_POINTER_OFFSET
3635 /* If the contents of the stack pointer reg are offset from the
3636 actual top-of-stack address, add the offset here. */
3637 if (GET_CODE (target) == REG)
3638 emit_insn (gen_add2_insn (target,
3639 gen_rtx (CONST_INT, VOIDmode,
3640 (STACK_POINTER_OFFSET + STACK_BYTES - 1) / STACK_BYTES * STACK_BYTES)));
3641 else
3643 rtx temp =
3644 expand_binop (GET_MODE (target), add_optab, target,
3645 gen_rtx (CONST_INT, VOIDmode,
3646 (STACK_POINTER_OFFSET + STACK_BYTES - 1) / STACK_BYTES * STACK_BYTES),
3647 target,
3648 1, OPTAB_DIRECT);
3649 if (temp == 0) abort ();
3650 if (temp != target)
3651 emit_move_insn (target, temp);
3653 #endif
3654 #ifndef STACK_GROWS_DOWNWARD
3655 anti_adjust_stack (round_push (op0));
3656 #endif
3657 /* Some systems require a particular insn to refer to the stack
3658 to make the pages exist. */
3659 #ifdef HAVE_probe
3660 if (HAVE_probe)
3661 emit_insn (gen_probe ());
3662 #endif
3663 return target;
3665 case BUILT_IN_FFS:
3666 if (arglist == 0
3667 /* Arg could be non-integer if user redeclared this fcn wrong. */
3668 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
3669 return const0_rtx;
3671 /* Compute the argument. */
3672 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3673 /* Compute ffs, into TARGET if possible.
3674 Set TARGET to wherever the result comes back. */
3675 target = expand_unop (mode, ffs_optab, op0, target, 1);
3676 if (target == 0)
3677 abort ();
3678 return target;
3680 default:
3681 abort ();
3685 /* Expand code for a post- or pre- increment or decrement
3686 and return the RTX for the result.
3687 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
3689 static rtx
3690 expand_increment (exp, post)
3691 register tree exp;
3692 int post;
3693 {
3694 register rtx op0, op1;
3695 register rtx temp;
3696 register tree incremented = TREE_OPERAND (exp, 0);
3697 optab this_optab = add_optab;
3698 int icode;
3699 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3700 int op0_is_copy = 0;
3702 /* Stabilize any component ref that might need to be
3703 evaluated more than once below. */
3704 if (TREE_CODE (incremented) == COMPONENT_REF
3705 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
3706 || DECL_MODE (TREE_OPERAND (incremented, 1)) == BImode))
3707 incremented = stabilize_reference (incremented);
3709 /* Compute the operands as RTX.
3710 Note whether OP0 is the actual lvalue or a copy of it:
3711 I believe it is a copy iff it is a register and insns were
3712 generated in computing it. */
3713 temp = get_last_insn ();
3714 op0 = expand_expr (incremented, 0, VOIDmode, 0);
3715 if (temp != get_last_insn ())
3716 op0_is_copy = (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG);
3717 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3719 /* Decide whether incrementing or decrementing. */
3720 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
3721 || TREE_CODE (exp) == PREDECREMENT_EXPR)
3722 this_optab = sub_optab;
3724 /* If OP0 is not the actual lvalue, but rather a copy in a register,
3725 then we cannot just increment OP0. We must
3726 therefore contrive to increment the original value.
3727 Then we can return OP0 since it is a copy of the old value. */
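/* Illustrative sketch, not from the original source: for `s.f++' the
   expansion of S.F may produce a fresh register holding a copy of the
   field; incrementing the copy would never update S.F itself.  Building
   the tree `s.f = s.f + 1' and expanding it as an assignment stores
   through the real lvalue, while the copy still holds the old value to
   return for a post-increment.  */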
3728 if (op0_is_copy)
3729 {
3730 /* This is the easiest way to increment the value wherever it is.
3731 Problems with multiple evaluation of INCREMENTED
3732 are prevented because either (1) it is a component_ref,
3733 in which case it was stabilized above, or (2) it is an array_ref
3734 with constant index in an array in a register, which is
3735 safe to reevaluate. */
3736 tree newexp = build ((this_optab == add_optab
3737 ? PLUS_EXPR : MINUS_EXPR),
3738 TREE_TYPE (exp),
3739 incremented,
3740 TREE_OPERAND (exp, 1));
3741 temp = expand_assignment (incremented, newexp, ! post, 0);
3742 return post ? op0 : temp;
3743 }
3745 /* Convert decrement by a constant into a negative increment. */
3746 if (this_optab == sub_optab
3747 && GET_CODE (op1) == CONST_INT)
3748 {
3749 op1 = gen_rtx (CONST_INT, VOIDmode, - INTVAL (op1));
3750 this_optab = add_optab;
3751 }
3753 if (post)
3754 {
3755 /* We have a true reference to the value in OP0.
3756 If there is an insn to add or subtract in this mode, queue it. */
3758 /* I'm not sure this is still necessary. */
3759 op0 = stabilize (op0);
3761 icode = (int) this_optab->handlers[(int) mode].insn_code;
3762 if (icode != (int) CODE_FOR_nothing
3763 /* Make sure that OP0 is valid for operands 0 and 1
3764 of the insn we want to queue. */
3765 && (*insn_operand_predicate[icode][0]) (op0, mode)
3766 && (*insn_operand_predicate[icode][1]) (op0, mode))
3767 {
3768 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
3769 op1 = force_reg (mode, op1);
3771 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
3772 }
3773 }
3775 /* Preincrement, or we can't increment with one simple insn. */
3776 if (post)
3777 /* Save a copy of the value before inc or dec, to return it later. */
3778 temp = copy_to_reg (op0);
3779 else
3780 /* Arrange to return the incremented value. */
3781 /* Copy the rtx because expand_binop will protect from the queue,
3782 and the results of that would be invalid for us to return
3783 if our caller does emit_queue before using our result. */
3784 temp = copy_rtx (op0);
3786 /* Increment however we can. */
3787 op1 = expand_binop (mode, this_optab, op0, op1, op0,
3788 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
3789 /* Make sure the value is stored into OP0. */
3790 if (op1 != op0)
3791 emit_move_insn (op0, op1);
3793 return temp;
3794 }
3796 /* Expand all function calls contained within EXP, innermost ones first.
3797 But don't look within expressions that have sequence points.
3798 For each CALL_EXPR, record the rtx for its value
3799 in the CALL_EXPR_RTL field.
3801 Calls that return large structures for which a structure return
3802 stack slot is needed are not preexpanded. Preexpanding them loses
3803 because if more than one were preexpanded they would try to use the
3804 same stack slot. */
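/* Illustrative sketch, not from the original source: in `f (g (x), h (y))'
   the calls to G and H are expanded first, each recording its value rtx in
   CALL_EXPR_RTL, so that pushing F's args cannot be interrupted by a
   nested call that would clobber the outgoing-argument area.  Were G and H
   both struct-returning, they would contend for the one structure-return
   slot, hence such calls are skipped.  */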
3806 static void
3807 preexpand_calls (exp)
3808 tree exp;
3809 {
3810 register int nops, i;
3812 if (! do_preexpand_calls)
3813 return;
3815 /* Only expressions and references can contain calls. */
3817 if (tree_code_type[(int) TREE_CODE (exp)][0] != 'e'
3818 && tree_code_type[(int) TREE_CODE (exp)][0] != 'r')
3819 return;
3821 switch (TREE_CODE (exp))
3822 {
3823 case CALL_EXPR:
3824 /* Do nothing to built-in functions. */
3825 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3826 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3827 && (DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
3828 != NOT_BUILT_IN))
3829 return;
3830 /* Precompute calls that don't return values in memory. */
3831 if (CALL_EXPR_RTL (exp) == 0
3832 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3833 && ! RETURN_IN_MEMORY (TREE_TYPE (exp)))
3834 CALL_EXPR_RTL (exp) = expand_call (exp, 0, 0, 0);
3835 return;
3837 case COMPOUND_EXPR:
3838 case COND_EXPR:
3839 case TRUTH_ANDIF_EXPR:
3840 case TRUTH_ORIF_EXPR:
3841 /* If we find one of these, then we can be sure
3842 the adjust will be done for it (since it makes jumps).
3843 Do it now, so that if this is inside an argument
3844 of a function, we don't get the stack adjustment
3845 after some other args have already been pushed. */
3846 do_pending_stack_adjust ();
3847 return;
3849 case RTL_EXPR:
3850 return;
3852 case SAVE_EXPR:
3853 if (SAVE_EXPR_RTL (exp) != 0)
3854 return;
3855 }
3857 nops = tree_code_length[(int) TREE_CODE (exp)];
3858 for (i = 0; i < nops; i++)
3859 if (TREE_OPERAND (exp, i) != 0)
3860 {
3861 register int type = *tree_code_type[(int) TREE_CODE (TREE_OPERAND (exp, i))];
3862 if (type == 'e' || type == 'r')
3863 preexpand_calls (TREE_OPERAND (exp, i));
3864 }
3865 }
3867 /* Force FUNEXP into a form suitable for the address of a CALL,
3868 and return that as an rtx. Also load the static chain register
3869 from either FUNEXP or CONTEXT. */
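/* Illustrative note, not from the original source: an EPmode value packs
   a code address together with a static chain, for languages with nested
   functions; below, the high part is moved into STATIC_CHAIN_RTX and the
   low part becomes the address actually called.  */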
3871 static rtx
3872 prepare_call_address (funexp, context)
3873 rtx funexp;
3874 rtx context;
3876 funexp = protect_from_queue (funexp, 0);
3877 if (context != 0)
3878 context = protect_from_queue (context, 0);
3880 /* Function variable in language with nested functions. */
3881 if (GET_MODE (funexp) == EPmode)
3883 emit_move_insn (static_chain_rtx, gen_highpart (Pmode, funexp));
3884 funexp = memory_address (FUNCTION_MODE, gen_lowpart (Pmode, funexp));
3885 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
3887 else
3889 if (context != 0)
3890 /* Unless function variable in C, or top level function constant */
3891 emit_move_insn (static_chain_rtx, lookup_static_chain (context));
3893 /* Make a valid memory address and copy constants thru pseudo-regs,
3894 but not for a constant address if -fno-function-cse. */
3895 if (GET_CODE (funexp) != SYMBOL_REF)
3896 funexp = memory_address (FUNCTION_MODE, funexp);
3897 else
3899 #ifndef NO_FUNCTION_CSE
3900 if (optimize && ! flag_no_function_cse)
3901 funexp = force_reg (Pmode, funexp);
3902 #endif
3905 if (context != 0)
3906 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
3908 return funexp;
3911 /* Generate instructions to call function FUNEXP,
3912 and optionally pop the results.
3913 The CALL_INSN is the first insn generated.
3915 FUNTYPE is the data type of the function, or, for a library call,
3916 the identifier for the name of the call. This is given to the
3917 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
3919 STACK_SIZE is the number of bytes of arguments on the stack,
3920 rounded up to STACK_BOUNDARY; zero if the size is variable.
3921 This is both to put into the call insn and
3922 to generate explicit popping code if necessary.
3924 NEXT_ARG_REG is the rtx that results from executing
3925 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
3926 just after all the args have had their registers assigned.
3927 This could be whatever you like, but normally it is the first
3928 arg-register beyond those used for args in this call,
3929 or 0 if all the arg-registers are used in this call.
3930 It is passed on to `gen_call' so you can put this info in the call insn.
3932 VALREG is a hard register in which a value is returned,
3933 or 0 if the call does not return a value.
3935 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
3936 the args to this call were processed.
3937 We restore `inhibit_defer_pop' to that value.
3939 USE_INSNS is a SEQUENCE of USE insns to be emitted immediately before
3940 the actual CALL insn. */
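/* Illustrative example, not from the original source: on a machine where
   the caller pops (RETURN_POPS_ARGS is 0), a call with STACK_SIZE of 12
   under -fdefer-pop merely adds 12 to PENDING_STACK_ADJUST, letting
   several consecutive calls share one stack adjustment; otherwise
   adjust_stack pops the 12 bytes right after the call.  */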
3942 static void
3943 emit_call_1 (funexp, funtype, stack_size, next_arg_reg, valreg, old_inhibit_defer_pop, use_insns)
3944 rtx funexp;
3945 tree funtype;
3946 int stack_size;
3947 rtx next_arg_reg;
3948 rtx valreg;
3949 int old_inhibit_defer_pop;
3950 rtx use_insns;
3952 rtx stack_size_rtx = gen_rtx (CONST_INT, VOIDmode, stack_size);
3953 rtx call_insn;
3955 if (valreg)
3956 emit_call_insn (gen_call_value (valreg,
3957 gen_rtx (MEM, FUNCTION_MODE, funexp),
3958 stack_size_rtx, next_arg_reg));
3959 else
3960 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
3961 stack_size_rtx, next_arg_reg));
3963 /* Find the CALL insn we just emitted and write the USE insns before it. */
3964 for (call_insn = get_last_insn();
3965 call_insn && GET_CODE (call_insn) != CALL_INSN;
3966 call_insn = PREV_INSN (call_insn))
3969 if (! call_insn)
3970 abort ();
3972 /* Put the USE insns before the CALL. */
3973 emit_insn_before (use_insns, call_insn);
3975 inhibit_defer_pop = old_inhibit_defer_pop;
3977 /* If returning from the subroutine does not automatically pop the args,
3978 we need an instruction to pop them sooner or later.
3979 Perhaps do it now; perhaps just record how much space to pop later. */
3981 if (! RETURN_POPS_ARGS (TREE_TYPE (funtype))
3982 && stack_size != 0)
3984 if (flag_defer_pop && inhibit_defer_pop == 0)
3985 pending_stack_adjust += stack_size;
3986 else
3987 adjust_stack (stack_size_rtx);
3991 /* At the start of a function, record that we have no previously-pushed
3992 arguments waiting to be popped. */
3994 void
3995 init_pending_stack_adjust ()
3997 pending_stack_adjust = 0;
4000 /* When exiting from function, if safe, clear out any pending stack adjust
4001 so the adjustment won't get done. */
4003 void
4004 clear_pending_stack_adjust ()
4006 #ifdef EXIT_IGNORE_STACK
4007 if (!flag_omit_frame_pointer && EXIT_IGNORE_STACK
4008 && ! TREE_INLINE (current_function_decl)
4009 && ! flag_inline_functions)
4010 pending_stack_adjust = 0;
4011 #endif
4014 /* Pop any previously-pushed arguments that have not been popped yet. */
4016 void
4017 do_pending_stack_adjust ()
4019 if (inhibit_defer_pop == 0)
4021 if (pending_stack_adjust != 0)
4022 adjust_stack (gen_rtx (CONST_INT, VOIDmode, pending_stack_adjust));
4023 pending_stack_adjust = 0;
4027 /* Expand all cleanups up to OLD_CLEANUPS.
4028 Needed here, and also for language-dependent calls. */
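/* Illustrative note, not from the original source: in C++, a class
   argument with a destructor queues that destructor on CLEANUPS_THIS_CALL
   while the argument is computed; this walks the list from the newest
   entry back to OLD_CLEANUPS, expanding each cleanup and restoring the
   list to its earlier state.  */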
4029 void
4030 expand_cleanups_to (old_cleanups)
4031 tree old_cleanups;
4033 while (cleanups_this_call != old_cleanups)
4035 expand_expr (TREE_VALUE (cleanups_this_call), 0, VOIDmode, 0);
4036 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
4040 /* Data structure and subroutines used within expand_call. */
4042 struct arg_data
4044 /* Tree node for this argument. */
4045 tree tree_value;
4046 /* Precomputed RTL value, or 0 if it isn't precomputed. */
4047 rtx value;
4048 /* Register to pass this argument in, or 0 if passed on stack. */
4049 rtx reg;
4050 /* Number of registers to use. 0 means put the whole arg in registers.
4051 Also 0 if not passed in registers. */
4052 int partial;
4053 /* Offset of this argument from beginning of stack-args. */
4054 struct args_size offset;
4055 /* Size of this argument on the stack, rounded up for any padding it gets;
4056 parts of the argument passed in registers do not count.
4057 If the FIRST_PARM_CALLER_OFFSET is negative, then register parms
4058 are counted here as well. */
4059 struct args_size size;
4060 /* Nonzero if this arg has already been stored. */
4061 int stored;
4062 /* const0_rtx means we should preallocate stack space for this arg.
4063 Any other nonzero value is the stack slot, preallocated.
4064 Used only for BLKmode. */
4065 rtx stack;
4068 static void store_one_arg ();
4069 static rtx target_for_arg ();
4071 /* Generate all the code for a function call
4072 and return an rtx for its value.
4073 Store the value in TARGET (specified as an rtx) if convenient.
4074 If the value is stored in TARGET then TARGET is returned.
4075 If IGNORE is nonzero, then we ignore the value of the function call. */
4077 static rtx
4078 expand_call (exp, target, ignore, modifier)
4079 tree exp;
4080 rtx target;
4081 int ignore;
4082 enum expand_modifier modifier;
4084 /* List of actual parameters. */
4085 tree actparms = TREE_OPERAND (exp, 1);
4086 /* RTX for the function to be called. */
4087 rtx funexp;
4088 /* Data type of the function. */
4089 tree funtype;
4090 /* Declaration of the function being called,
4091 or 0 if the function is computed (not known by name). */
4092 tree fndecl = 0;
4094 /* Register in which non-BLKmode value will be returned,
4095 or 0 if no value or if value is BLKmode. */
4096 rtx valreg;
4097 /* Address where we should return a BLKmode value;
4098 0 if value not BLKmode. */
4099 rtx structure_value_addr = 0;
4100 /* Nonzero if that address is being passed by treating it as
4101 an extra, implicit first parameter. Otherwise,
4102 it is passed by being copied directly into struct_value_rtx. */
4103 int structure_value_addr_parm = 0;
4104 /* Nonzero if called function returns an aggregate in memory PCC style,
4105 by returning the address of where to find it. */
4106 int pcc_struct_value = 0;
4108 /* Number of actual parameters in this call, including struct value addr. */
4109 int num_actuals;
4110 /* Number of named args. Args after this are anonymous ones
4111 and they must all go on the stack. */
4112 int n_named_args;
4114 /* Vector of information about each argument.
4115 Arguments are numbered in the order they will be pushed,
4116 not the order they are written. */
4117 struct arg_data *args;
4119 /* Total size in bytes of all the stack-parms scanned so far. */
4120 struct args_size args_size;
4121 /* Remember initial value of args_size.constant. */
4122 int starting_args_size;
4123 /* Nonzero means count reg-parms' size in ARGS_SIZE. */
4124 int stack_count_regparms = 0;
4125 /* Data on reg parms scanned so far. */
4126 CUMULATIVE_ARGS args_so_far;
4127 /* Nonzero if a reg parm has been scanned. */
4128 int reg_parm_seen;
4129 /* Nonzero if we must avoid push-insns in the args for this call. */
4130 int must_preallocate;
4131 /* 1 if scanning parms front to back, -1 if scanning back to front. */
4132 int inc;
4133 /* Address of space preallocated for stack parms
4134 (on machines that lack push insns), or 0 if space not preallocated. */
4135 rtx argblock = 0;
4137 /* Nonzero if it is plausible that this is a call to alloca. */
4138 int may_be_alloca;
4139 /* Nonzero if this is a call to setjmp or a related function. */
4140 int is_setjmp;
4141 /* Nonzero if this is a call to longjmp or a related function. */
4142 int is_longjmp;
4143 /* Nonzero if this is a call to an inline function. */
4144 int is_integrable = 0;
4145 /* Nonzero if this is a call to __builtin_new. */
4146 int is_builtin_new;
4147 /* Nonzero if this is a call to a `const' function. */
4148 int is_const = 0;
4150 /* Nonzero if there are BLKmode args whose data types require them
4151 to be passed in memory, not (even partially) in registers. */
4152 int BLKmode_parms_forced = 0;
4153 /* The offset of the first BLKmode parameter which
4154 *must* be passed in memory. */
4155 int BLKmode_parms_first_offset = 0;
4156 /* Total size of BLKmode parms which could usefully be preallocated. */
4157 int BLKmode_parms_sizes = 0;
4159 /* Amount stack was adjusted to protect BLKmode parameters
4160 which are below the nominal "stack address" value. */
4161 rtx protected_stack = 0;
4163 /* The last insn before the things that are intrinsically part of the call.
4164 The beginning reg-note goes on the insn after this one. */
4165 rtx insn_before;
4167 rtx old_stack_level = 0;
4168 int old_pending_adj;
4169 int old_inhibit_defer_pop = inhibit_defer_pop;
4170 tree old_cleanups = cleanups_this_call;
4171 rtx use_insns;
4173 register tree p;
4174 register int i;
4176 /* See if we can find a DECL-node for the actual function.
4177 As a result, decide whether this is a call to an integrable function. */
4179 p = TREE_OPERAND (exp, 0);
4180 if (TREE_CODE (p) == ADDR_EXPR)
4182 fndecl = TREE_OPERAND (p, 0);
4183 if (TREE_CODE (fndecl) != FUNCTION_DECL)
4185 /* May still be a `const' function if it is
4186 a call through a const function. */
4187 fndecl = 0;
4189 else
4191 extern tree current_function_decl;
4192 extern int flag_no_inline;
4194 if (!flag_no_inline
4195 && fndecl != current_function_decl
4196 && DECL_SAVED_INSNS (fndecl))
4197 is_integrable = 1;
4198 else if (! TREE_ADDRESSABLE (fndecl))
4200 /* In case this function later becomes inlineable,
4201 record that there was already a non-inline call to it.
4203 Use abstraction instead of setting TREE_ADDRESSABLE
4204 directly. */
4205 if (TREE_INLINE (fndecl) && extra_warnings)
4206 warning_with_decl (fndecl, "can't inline call to `%s' which was declared inline");
4207 mark_addressable (fndecl);
4210 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl))
4211 is_const = 1;
4215 /* When calling a const function, we must pop the stack args right away,
4216 so that the pop is deleted or moved with the call. */
4217 if (is_const)
4218 NO_DEFER_POP;
4220 /* Set up a place to return a structure. */
4222 /* Cater to broken compilers. */
4223 if (aggregate_value_p (exp))
4225 /* This call returns a big structure. */
4226 #ifdef PCC_STATIC_STRUCT_RETURN
4227 if (flag_pcc_struct_return)
4229 pcc_struct_value = 1;
4230 is_integrable = 0; /* Easier than making that case work right. */
4232 else
4233 #endif
4235 if (target && GET_CODE (target) == MEM)
4237 structure_value_addr = XEXP (target, 0);
4238 if (reg_mentioned_p (stack_pointer_rtx, structure_value_addr))
4239 structure_value_addr = copy_to_reg (structure_value_addr);
4241 else
4243 /* Make room on the stack to hold the value. */
4244 structure_value_addr
4245 = get_structure_value_addr (expr_size (exp));
4246 target = 0;
4251 /* If called function is inline, try to integrate it. */
4253 if (is_integrable)
4255 extern rtx expand_inline_function ();
4256 rtx temp;
4258 temp = expand_inline_function (fndecl, actparms, target,
4259 ignore, TREE_TYPE (exp),
4260 structure_value_addr);
4262 /* If inlining succeeded, return. */
4263 if ((int) temp != -1)
4265 /* Perform all cleanups needed for the arguments of this call
4266 (i.e. destructors in C++). It is ok if these destructors
4267 clobber RETURN_VALUE_REG, because the only time we care about
4268 this is when TARGET is that register. But in C++, we take
4269 care to never return that register directly. */
4270 expand_cleanups_to (old_cleanups);
4271 return temp;
4274 /* If inlining failed, mark FNDECL as needing to be compiled
4275 separately after all.
4277 Use abstraction instead of setting TREE_ADDRESSABLE directly. */
4278 mark_addressable (fndecl);
4281 #if 0
4282 /* Unless it's a call to a specific function that isn't alloca,
4283 if it has one argument, we must assume it might be alloca. */
4285 may_be_alloca =
4286 (!(fndecl != 0
4287 && strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4288 "alloca"))
4289 && actparms != 0
4290 && TREE_CHAIN (actparms) == 0);
4291 #else
4292 /* We assume that alloca will always be called by name. It
4293 makes no sense to pass it as a pointer-to-function to
4294 anything that does not understand its behavior. */
4295 may_be_alloca =
4296 (fndecl && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)), "alloca")
4297 || ! strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4298 "__builtin_alloca")));
4299 #endif
4301 /* See if this is a call to a function that can return more than once. */
4303 is_setjmp
4304 = (fndecl != 0
4305 && (!strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)), "setjmp")
4306 || !strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)), "_setjmp")));
4308 is_longjmp
4309 = (fndecl != 0
4310 && (!strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)), "longjmp")
4311 || !strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)), "_longjmp")));
4313 is_builtin_new
4314 = (fndecl != 0
4315 && (!strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)), "__builtin_new")));
4317 if (may_be_alloca)
4319 frame_pointer_needed = 1;
4320 may_call_alloca = 1;
4321 current_function_calls_alloca = 1;
4324 /* Don't let pending stack adjusts add up to too much.
4325 Also, do all pending adjustments now
4326 if there is any chance this might be a call to alloca (or longjmp). */
4328 if (pending_stack_adjust >= 32
4329 || (pending_stack_adjust > 0 && (may_be_alloca || is_setjmp || is_longjmp)))
4330 do_pending_stack_adjust ();
4332 /* Operand 0 is a pointer-to-function; get the type of the function. */
4333 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4334 if (TREE_CODE (funtype) != POINTER_TYPE)
4335 abort ();
4336 funtype = TREE_TYPE (funtype);
4338 /* If struct_value_rtx is 0, it means pass the address
4339 as if it were an extra parameter. */
4340 if (structure_value_addr && struct_value_rtx == 0)
4342 rtx tem;
4344 INIT_CUMULATIVE_ARGS (args_so_far, funtype);
4345 tem = FUNCTION_ARG (args_so_far, Pmode,
4346 build_pointer_type (TREE_TYPE (funtype)), 1);
4347 if (tem == 0)
4349 actparms = tree_cons (error_mark_node,
4350 build (SAVE_EXPR,
4351 type_for_size (GET_MODE_BITSIZE (Pmode), 0),
4353 force_reg (Pmode, structure_value_addr)),
4354 actparms);
4355 structure_value_addr_parm = 1;
4359 /* Count the arguments and set NUM_ACTUALS. */
4360 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
4361 num_actuals = i;
4363 /* Compute number of named args.
4364 Don't include the last named arg if anonymous args follow.
4365 (If no anonymous args follow, the result of list_length
4366 is actually one too large.) */
4367 if (TYPE_ARG_TYPES (funtype) != 0)
4368 n_named_args = list_length (TYPE_ARG_TYPES (funtype)) - 1;
4369 else
4370 /* If we know nothing, treat all args as named. */
4371 n_named_args = num_actuals;
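/* Illustrative example, not from the original source: for
   `int f (int a, int b)' the TYPE_ARG_TYPES chain is (int, int, void),
   so list_length yields 3 and N_NAMED_ARGS becomes 2, which is exact
   because the terminating void entry was counted.  */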
4373 /* Make a vector to hold all the information about each arg. */
4374 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
4375 bzero (args, num_actuals * sizeof (struct arg_data));
4377 args_size.constant = 0;
4378 args_size.var = 0;
4379 #ifdef FIRST_PARM_CALLER_OFFSET
4380 args_size.constant = FIRST_PARM_CALLER_OFFSET (funtype);
4381 stack_count_regparms = 1;
4382 #endif
4383 starting_args_size = args_size.constant;
4385 /* In this loop, we consider args in the order they are written.
4386 We fill up ARGS from the front or from the back if necessary
4387 so that in any case the first arg to be pushed ends up at the front. */
4389 #ifdef PUSH_ARGS_REVERSED
4390 i = num_actuals - 1, inc = -1;
4391 /* In this case, must reverse order of args
4392 so that we compute and push the last arg first. */
4393 #else
4394 i = 0, inc = 1;
4395 #endif
4397 INIT_CUMULATIVE_ARGS (args_so_far, funtype);
4399 for (p = actparms; p; p = TREE_CHAIN (p), i += inc)
4401 tree type = TREE_TYPE (TREE_VALUE (p));
4402 args[i].tree_value = TREE_VALUE (p);
4403 args[i].offset = args_size;
4405 if (type == error_mark_node
4406 || TYPE_SIZE (type) == 0)
4407 continue;
4409 /* Decide where to pass this arg. */
4410 /* args[i].reg is nonzero if all or part is passed in registers.
4411 args[i].partial is nonzero if part but not all is passed in registers,
4412 and the exact value says how many words are passed in registers. */
4414 if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4415 && args_size.var == 0
4416 /* error_mark_node here is a flag for the fake argument
4417 for a structure value address. */
4418 && TREE_PURPOSE (p) != error_mark_node)
4420 args[i].reg = FUNCTION_ARG (args_so_far, TYPE_MODE (type), type,
4421 i < n_named_args);
4422 /* If this argument needs more than the usual parm alignment, do
4423 extrinsic padding to reach that alignment. */
4425 #ifdef MAX_PARM_BOUNDARY
4426 /* If MAX_PARM_BOUNDARY is not defined, it means that the usual
4427 alignment requirements are relaxed for parms, and that no parm
4428 needs more than PARM_BOUNDARY, regardless of data type. */
4430 if (PARM_BOUNDARY < TYPE_ALIGN (type))
4432 int boundary = PARM_BOUNDARY;
4434 /* Determine the boundary to pad up to. */
4435 if (TYPE_ALIGN (type) > boundary)
4436 boundary = TYPE_ALIGN (type);
4437 if (boundary > MAX_PARM_BOUNDARY)
4438 boundary = MAX_PARM_BOUNDARY;
4440 /* If the previous args don't reach such a boundary,
4441 advance to the next one. */
4442 boundary /= BITS_PER_UNIT;
4443 args[i].offset.constant += boundary - 1;
4444 args[i].offset.constant &= ~(boundary - 1);
4445 args_size.constant += boundary - 1;
4446 args_size.constant &= ~(boundary - 1);
4448 if (args_size.var != 0)
4449 abort (); /* This case not implemented yet */
4451 #endif /* MAX_PARM_BOUNDARY */
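/* Worked example, not from the original source: with PARM_BOUNDARY of 32
   bits, a type aligned to 64 bits, and MAX_PARM_BOUNDARY of 64, BOUNDARY
   becomes 8 bytes, and an offset of 20 is rounded by (20 + 7) & ~7
   up to 24.  */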
4453 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4454 args[i].partial
4455 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far,
4456 TYPE_MODE (type), type,
4457 i < n_named_args);
4458 #endif
4461 /* Compute the stack-size of this argument. */
4463 if (args[i].reg != 0 && args[i].partial == 0
4464 && ! stack_count_regparms)
4465 /* On most machines, don't count stack space for a register arg. */
4467 else if (TYPE_MODE (type) != BLKmode)
4469 register int size;
4471 size = GET_MODE_SIZE (TYPE_MODE (type));
4472 /* Compute how much space the push instruction will push.
4473 On many machines, pushing a byte will advance the stack
4474 pointer by a halfword. */
4475 #ifdef PUSH_ROUNDING
4476 size = PUSH_ROUNDING (size);
4477 #endif
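/* Worked example, not from the original source: pushing a 1-byte scalar
   where PUSH_ROUNDING (1) is 2 advances the stack 2 bytes; with a 32-bit
   PARM_BOUNDARY the code below then widens the slot to
   ((2 + 3) / 4) * 4, i.e. 4 bytes.  */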
4478 /* Compute how much space the argument should get:
4479 maybe pad to a multiple of the alignment for arguments. */
4480 if (none == FUNCTION_ARG_PADDING (TYPE_MODE (type), const0_rtx))
4481 args[i].size.constant = size;
4482 else
4483 args[i].size.constant
4484 = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4485 / (PARM_BOUNDARY / BITS_PER_UNIT))
4486 * (PARM_BOUNDARY / BITS_PER_UNIT));
4488 else
4490 register tree size = size_in_bytes (type);
4492 /* A nonscalar. Round its size up to a multiple
4493 of PARM_BOUNDARY bits, unless it is not supposed to be padded. */
4494 if (none
4495 != FUNCTION_ARG_PADDING (TYPE_MODE (type),
4496 expand_expr (size, 0, VOIDmode, 0)))
4497 size = convert_units (convert_units (size, BITS_PER_UNIT,
4498 PARM_BOUNDARY),
4499 PARM_BOUNDARY, BITS_PER_UNIT);
4500 ADD_PARM_SIZE (args[i].size, size);
4502 /* Certain data types may not be passed in registers
4503 (eg C++ classes with constructors).
4504 Also, BLKmode parameters initialized from CALL_EXPRs
4505 are treated specially, if it is a win to do so. */
4506 if (TREE_CODE (TREE_VALUE (p)) == CALL_EXPR
4507 || TREE_ADDRESSABLE (type))
4509 if (TREE_ADDRESSABLE (type))
4510 BLKmode_parms_forced = 1;
4511 /* This is a marker for such a parameter. */
4512 args[i].stack = const0_rtx;
4513 BLKmode_parms_sizes += TREE_INT_CST_LOW (size);
4515 /* If this parm's location is "below" the nominal stack pointer,
4516 note to decrement the stack pointer while it is computed. */
4517 #ifdef FIRST_PARM_CALLER_OFFSET
4518 if (BLKmode_parms_first_offset == 0)
4519 BLKmode_parms_first_offset
4520 /* If parameter's offset is variable, assume the worst. */
4521 = (args[i].offset.var
4522 ? FIRST_PARM_CALLER_OFFSET (funtype)
4523 : args[i].offset.constant);
4524 #endif
4528 /* If a part of the arg was put into registers,
4529 don't include that part in the amount pushed. */
4530 if (! stack_count_regparms)
4531 args[i].size.constant
4532 -= ((args[i].partial * UNITS_PER_WORD)
4533 / (PARM_BOUNDARY / BITS_PER_UNIT)
4534 * (PARM_BOUNDARY / BITS_PER_UNIT));
4536 /* Update ARGS_SIZE, the total stack space for args so far. */
4538 args_size.constant += args[i].size.constant;
4539 if (args[i].size.var)
4541 ADD_PARM_SIZE (args_size, args[i].size.var);
4544 /* Increment ARGS_SO_FAR, which has info about which arg-registers
4545 have been used, etc. */
4547 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
4548 i < n_named_args);
4551 /* If we would have to push a partially-in-regs parm
4552 before other stack parms, preallocate stack space instead. */
4553 must_preallocate = 0;
4554 {
4555 int partial_seen = 0;
4556 for (i = 0; i < num_actuals; i++)
4557 {
4558 if (args[i].partial > 0)
4559 partial_seen = 1;
4560 else if (partial_seen && args[i].reg == 0)
4561 must_preallocate = 1;
4562 }
4563 }
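/* Illustrative example, not from the original source: if arg 0 is partly
   in registers (its stack portion is stored late, with the register
   parms) and arg 1 lives wholly on the stack, pushing arg 1 first would
   leave no room at arg 0's offset; preallocating the block lets every
   arg be stored at a known offset instead of pushed in order.  */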
4565 /* Precompute all register parameters. It isn't safe to compute anything
4566 once we have started filling any specific hard regs.
4567 If this function call is cse'able, precompute all the parameters. */
4569 reg_parm_seen = 0;
4570 for (i = 0; i < num_actuals; i++)
4571 if (args[i].reg != 0 || is_const)
4573 int j;
4574 int struct_value_lossage = 0;
4576 /* First, see if this is a precomputed struct-returning function call
4577 and other subsequent parms are also such. */
4578 if ((TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
4579 || RETURN_IN_MEMORY (TREE_TYPE (args[i].tree_value)))
4580 && TREE_CODE (args[i].tree_value) == CALL_EXPR)
4581 for (j = i + 1; j < num_actuals; j++)
4582 if (((TYPE_MODE (TREE_TYPE (args[j].tree_value)) == BLKmode
4583 || RETURN_IN_MEMORY (TREE_TYPE (args[j].tree_value)))
4584 && TREE_CODE (args[j].tree_value) == CALL_EXPR
4585 && args[j].reg != 0) || is_const)
4587 /* We have two precomputed structure-values call expressions
4588 in our parm list. Both of them would normally use
4589 the structure-value block. To avoid the conflict,
4590 compute this parm with a different temporary block. */
4591 int size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
4592 rtx structval = assign_stack_local (BLKmode, size);
4593 args[i].value = expand_expr (args[i].tree_value, structval,
4594 VOIDmode, 0);
4595 struct_value_lossage = 1;
4596 break;
4598 if (!struct_value_lossage)
4599 args[i].value = expand_expr (args[i].tree_value, 0, VOIDmode, 0);
4601 if (args[i].reg != 0)
4602 reg_parm_seen = 1;
4604 if (GET_CODE (args[i].value) != MEM
4605 && ! CONSTANT_P (args[i].value)
4606 && GET_CODE (args[i].value) != CONST_DOUBLE)
4607 args[i].value
4608 = force_reg (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4609 args[i].value);
4610 /* ANSI doesn't require a sequence point here,
4611 but PCC has one, so this will avoid some problems. */
4612 emit_queue ();
4615 /* Get the function to call, in the form of RTL, if it is a constant. */
4616 if (fndecl && is_const)
4618 /* Get a SYMBOL_REF rtx for the function address. */
4619 funexp = XEXP (DECL_RTL (fndecl), 0);
4621 #ifndef NO_FUNCTION_CSE
4622 /* Pass the address through a pseudoreg, if desired,
4623 before the "beginning" of the library call.
4624 So this insn isn't "part of" the library call, in case that
4625 is deleted, or cse'd. */
4626 if (! flag_no_function_cse)
4627 funexp = copy_to_mode_reg (Pmode, funexp);
4628 #endif
4631 /* Now we are about to start emitting insns that can be deleted
4632 if the libcall is deleted. */
4633 insn_before = get_last_insn ();
4635 /* Maybe do additional rounding on the size of the arguments. */
4636 #ifdef STACK_ARGS_ADJUST
4637 STACK_ARGS_ADJUST (args_size);
4638 #endif
4640 /* If we have no actual push instructions, or shouldn't use them,
4641 or we need a variable amount of space, make space for all args right now.
4642 Round the needed size up to multiple of STACK_BOUNDARY. */
4644 if (args_size.var != 0)
4646 old_stack_level = copy_to_mode_reg (Pmode, stack_pointer_rtx);
4647 old_pending_adj = pending_stack_adjust;
4648 argblock = push_block (round_push (ARGS_SIZE_RTX (args_size)));
4650 else if (args_size.constant > 0)
4652 int needed = args_size.constant;
4654 #ifdef STACK_BOUNDARY
4655 needed = (needed + STACK_BYTES - 1) / STACK_BYTES * STACK_BYTES;
4656 #endif
4657 args_size.constant = needed;
4659 if (
4660 #ifndef PUSH_ROUNDING
4661 1 /* Always preallocate if no push insns. */
4662 #else
4663 must_preallocate || BLKmode_parms_forced
4664 || BLKmode_parms_sizes > (args_size.constant >> 1)
4665 #endif
4668 /* Try to reuse some or all of the pending_stack_adjust
4669 to get this space. Maybe we can avoid any pushing. */
4670 if (needed > pending_stack_adjust)
4672 needed -= pending_stack_adjust;
4673 pending_stack_adjust = 0;
4675 else
4677 pending_stack_adjust -= needed;
4678 needed = 0;
4680 argblock = push_block (gen_rtx (CONST_INT, VOIDmode, needed));
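/* Worked example, not from the original source: if NEEDED is 16 and
   PENDING_STACK_ADJUST is 24, the deferred pop shrinks to 8 and
   push_block allocates 0 fresh bytes, reusing space still held by a
   previous call's popped arguments; if only 8 were pending, the pop is
   cancelled entirely and 8 more bytes are pushed.  */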
4682 /* Once this space is used, we cannot give other calls
4683 (which might be part of the args to this call)
4684 access to this space. This is because there is
4685 no way to say that we are building arguments
4686 in a place which should not be deallocated by
4687 `emit_call_1', which, by the way, is the way
4688 this space gets deallocated. */
4689 do_pending_stack_adjust ();
4692 #ifndef PUSH_ROUNDING
4693 else if (BLKmode_parms_forced)
4695 /* If we have reg-parms that need to be temporarily on the stack,
4696 set up an arg block address even though there is no space
4697 to be allocated for it. */
4698 argblock = push_block (const0_rtx);
4700 #endif
4702 #if 0
4703 /* If stack needs padding below the args, increase all arg offsets
4704 so the args are stored above the padding. */
4705 if (stack_padding)
4706 for (i = 0; i < num_actuals; i++)
4707 args[i].offset.constant += stack_padding;
4708 #endif
4710 /* Don't try to defer pops if preallocating, not even from the first arg,
4711 since ARGBLOCK probably refers to the SP. */
4712 if (argblock)
4713 NO_DEFER_POP;
4715 #ifdef STACK_GROWS_DOWNWARD
4716 /* If any BLKmode parms need to be preallocated in space
4717 below the nominal stack-pointer address, we need to adjust the
4718 stack pointer so that this location is temporarily above it.
4719 This ensures that computation won't clobber that space. */
4720 if (BLKmode_parms_first_offset < 0 && argblock != 0)
4722 int needed = -BLKmode_parms_first_offset;
4723 argblock = copy_to_reg (argblock);
4725 #ifdef STACK_BOUNDARY
4726 needed = (needed + STACK_BYTES - 1) / STACK_BYTES * STACK_BYTES;
4727 #endif
4728 protected_stack = gen_rtx (CONST_INT, VOIDmode, needed);
4729 anti_adjust_stack (protected_stack);
4731 #endif /* STACK_GROWS_DOWNWARD */
4733 /* Get the function to call, in the form of RTL. */
4734 if (fndecl)
4735 /* Get a SYMBOL_REF rtx for the function address. */
4736 funexp = XEXP (DECL_RTL (fndecl), 0);
4737 else
4738 /* Generate an rtx (probably a pseudo-register) for the address. */
4740 funexp = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
4741 emit_queue ();
4744 /* Figure out the register where the value, if any, will come back. */
4745 valreg = 0;
4746 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
4747 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
4748 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
4750 /* Now compute and store all non-register parms.
4751 These come before register parms, since they can require block-moves,
4752 which could clobber the registers used for register parms.
4753 Parms which have partial registers are not stored here,
4754 but we do preallocate space here if they want that. */
4756 for (i = 0; i < num_actuals; i++)
4758 /* Preallocate the stack space for a parm if appropriate
4759 so it can be computed directly in the stack space. */
4760 if (args[i].stack != 0 && argblock != 0)
4761 args[i].stack = target_for_arg (TREE_TYPE (args[i].tree_value),
4762 ARGS_SIZE_RTX (args[i].size),
4763 argblock, args[i].offset);
4764 else
4765 args[i].stack = 0;
4767 if (args[i].reg == 0
4768 && TYPE_SIZE (TREE_TYPE (args[i].tree_value)) != 0)
4769 store_one_arg (&args[i], argblock, may_be_alloca);
4772 /* Now store any partially-in-registers parm.
4773 This is the last place a block-move can happen. */
4774 if (reg_parm_seen)
4775 for (i = 0; i < num_actuals; i++)
4776 if (args[i].partial != 0)
4777 store_one_arg (&args[i], argblock, may_be_alloca);
4779 if (protected_stack != 0)
4780 adjust_stack (protected_stack);
4782 /* Pass the function the address in which to return a structure value. */
4783 if (structure_value_addr && ! structure_value_addr_parm)
4784 emit_move_insn (struct_value_rtx,
4785 force_reg (Pmode, force_operand (structure_value_addr, 0)));
4787 /* Now set up any wholly-register parms. They were computed already. */
4788 if (reg_parm_seen)
4789 for (i = 0; i < num_actuals; i++)
4790 if (args[i].reg != 0 && args[i].partial == 0)
4791 store_one_arg (&args[i], argblock, may_be_alloca);
4793 /* Perform postincrements before actually calling the function. */
4794 emit_queue ();
4796 /* All arguments and registers used for the call must be set up by now! */
4798 /* ??? Other languages need a nontrivial second argument (static chain). */
4799 funexp = prepare_call_address (funexp, 0);
4801 /* Mark all register-parms as living through the call. */
4802 start_sequence ();
4803 for (i = 0; i < num_actuals; i++)
4804 if (args[i].reg != 0)
4806 if (args[i].partial > 0)
4807 use_regs (REGNO (args[i].reg), args[i].partial);
4808 else if (GET_MODE (args[i].reg) == BLKmode)
4809 use_regs (REGNO (args[i].reg),
4810 ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
4811 + UNITS_PER_WORD - 1)
4812 / UNITS_PER_WORD));
4813 else
4814 emit_insn (gen_rtx (USE, VOIDmode, args[i].reg));
4817 if (structure_value_addr && ! structure_value_addr_parm
4818 && GET_CODE (struct_value_rtx) == REG)
4819 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
4821 use_insns = gen_sequence ();
4822 end_sequence ();
4824 /* Generate the actual call instruction. */
4825 /* This also has the effect of turning off any pop-inhibition
4826 done in expand_call. */
4827 if (args_size.constant < 0)
4828 args_size.constant = 0;
4829 emit_call_1 (funexp, funtype, args_size.constant,
4830 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4831 valreg, old_inhibit_defer_pop, use_insns);
4833 /* ??? Nothing has been done here to record control flow
4834 when contained functions can do nonlocal gotos. */
4836 /* For calls to `setjmp', etc., inform flow.c it should complain
4837 if nonvolatile values are live. */
4839 if (is_setjmp)
4841 emit_note (IDENTIFIER_POINTER (DECL_NAME (fndecl)), NOTE_INSN_SETJMP);
4842 current_function_calls_setjmp = 1;
4845 /* Notice functions that cannot return.
4846 If optimizing, insns emitted below will be dead.
4847 If not optimizing, they will exist, which is useful
4848 if the user uses the `return' command in the debugger. */
4850 if (fndecl && TREE_THIS_VOLATILE (fndecl))
4851 emit_barrier ();
4853 /* For calls to __builtin_new, note that it can never return 0.
4854 This is because a new handler will be called, and 0 is not
4855 among the numbers it is supposed to return. */
4856 #if 0
4857 if (is_builtin_new)
4858 emit_note (IDENTIFIER_POINTER (DECL_NAME (fndecl)), NOTE_INSN_BUILTIN_NEW);
4859 #endif
4861 /* If there are cleanups to be called, don't use a hard reg as target. */
4862 if (cleanups_this_call != old_cleanups
4863 && target && REG_P (target)
4864 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4865 target = 0;
4867 /* If value type not void, return an rtx for the value. */
4869 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
4870 || ignore)
4872 target = const0_rtx;
4874 else if (structure_value_addr)
4876 if (target == 0 || GET_CODE (target) != MEM)
4877 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
4878 memory_address (BLKmode, structure_value_addr));
4880 else if (pcc_struct_value)
4882 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
4883 fndecl);
4884 if (target == 0)
4885 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
4886 copy_to_reg (valreg));
4887 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
4888 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
4889 copy_to_reg (valreg)));
4890 else
4891 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
4892 expr_size (exp),
4893 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
4895 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp)))
4897 if (!rtx_equal_p (target, valreg))
4898 emit_move_insn (target, valreg);
4899 else
4900 /* This tells expand_inline_function to copy valreg to its target. */
4901 emit_insn (gen_rtx (USE, VOIDmode, valreg));
4903 else
4904 target = copy_to_reg (valreg);
4906 /* Perform all cleanups needed for the arguments of this call
4907 (i.e. destructors in C++). */
4908 expand_cleanups_to (old_cleanups);
4910 /* If size of args is variable, restore saved stack-pointer value. */
4912 if (old_stack_level)
4914 emit_move_insn (stack_pointer_rtx, old_stack_level);
4915 pending_stack_adjust = old_pending_adj;
4918 /* If call is cse'able, make appropriate pair of reg-notes around it. */
4919 if (is_const)
4921 rtx insn_first = NEXT_INSN (insn_before);
4922 rtx insn_last = get_last_insn ();
4923 rtx note = 0;
4925 /* Don't put the notes on if we don't have insns that can hold them. */
4926 if ((GET_CODE (insn_first) == INSN
4927 || GET_CODE (insn_first) == CALL_INSN
4928 || GET_CODE (insn_first) == JUMP_INSN)
4929 && (GET_CODE (insn_last) == INSN
4930 || GET_CODE (insn_last) == CALL_INSN
4931 || GET_CODE (insn_last) == JUMP_INSN))
4933 /* Construct an "equal form" for the value
4934 which mentions all the arguments in order
4935 as well as the function name. */
4936 for (i = 0; i < num_actuals; i++)
4937 if (args[i].reg != 0 || is_const)
4938 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].value, note);
4939 note = gen_rtx (EXPR_LIST, VOIDmode,
4940 XEXP (DECL_RTL (fndecl), 0), note);
4942 REG_NOTES (insn_last)
4943 = gen_rtx (EXPR_LIST, REG_EQUAL, note,
4944 gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
4945 REG_NOTES (insn_last)));
4946 REG_NOTES (insn_first)
4947 = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
4948 REG_NOTES (insn_first));
4952 return target;
4955 /* Return an rtx which represents a suitable home on the stack
4956 given TYPE, the type of the argument looking for a home.
4957 This is called only for BLKmode arguments.
4959 SIZE is the size needed for this target.
4960 ARGS_ADDR is the address of the bottom of the argument block for this call.
4961 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
4962 if this machine uses push insns. */
4964 static rtx
4965 target_for_arg (type, size, args_addr, offset)
4966 tree type;
4967 rtx size;
4968 rtx args_addr;
4969 struct args_size offset;
4971 rtx target;
4972 rtx offset_rtx = ARGS_SIZE_RTX (offset);
4974 /* We do not call memory_address if possible,
4975 because we want to address as close to the stack
4976 as possible. For non-variable sized arguments,
4977 this will be stack-pointer relative addressing. */
4978 if (GET_CODE (offset_rtx) == CONST_INT)
4979 target = plus_constant (args_addr, INTVAL (offset_rtx));
4980 else
4982 /* I have no idea how to guarantee that this
4983 will work in the presence of register parameters. */
4984 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
4985 target = memory_address (QImode, target);
4988 return gen_rtx (MEM, BLKmode, target);
4991 /* Store a single argument for a function call
4992 into the register or memory area where it must be passed.
4993 *ARG describes the argument value and where to pass it.
4994 ARGBLOCK is the address of the stack-block for all the arguments,
4995 or 0 on a machine where arguments are pushed individually.
4996 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4997 so must be careful about how the stack is used. */
4999 static void
5000 store_one_arg (arg, argblock, may_be_alloca)
5001 struct arg_data *arg;
5002 rtx argblock;
5003 int may_be_alloca;
5005 register tree pval = arg->tree_value;
5006 int used = 0;
5008 if (TREE_CODE (pval) == ERROR_MARK)
5009 return;
5011 if (arg->reg != 0 && arg->partial == 0)
5013 /* Being passed entirely in a register. */
5014 if (arg->value != 0)
5016 if (GET_MODE (arg->value) == BLKmode)
5017 move_block_to_reg (REGNO (arg->reg), arg->value,
5018 ((int_size_in_bytes (TREE_TYPE (pval))
5019 + UNITS_PER_WORD - 1)
5020 / UNITS_PER_WORD));
5021 else
5022 emit_move_insn (arg->reg, arg->value);
5024 else
5025 store_expr (pval, arg->reg, 0);
5027 /* Don't allow anything left on stack from computation
5028 of argument to alloca. */
5029 if (may_be_alloca)
5030 do_pending_stack_adjust ();
5032 else if (TYPE_MODE (TREE_TYPE (pval)) != BLKmode)
5034 register int size;
5035 rtx tem;
5037 /* Argument is a scalar, not entirely passed in registers.
5038 (If part is passed in registers, arg->partial says how much
5039 and emit_push_insn will take care of putting it there.)
5041 Push it, and if its size is less than the
5042 amount of space allocated to it,
5043 also bump stack pointer by the additional space.
5044 Note that in C the default argument promotions
5045 will prevent such mismatches. */
5047 used = size = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (pval)));
5048 /* Compute how much space the push instruction will push.
5049 On many machines, pushing a byte will advance the stack
5050 pointer by a halfword. */
5051 #ifdef PUSH_ROUNDING
5052 size = PUSH_ROUNDING (size);
5053 #endif
5054 /* Compute how much space the argument should get:
5055 round up to a multiple of the alignment for arguments. */
5056 if (none != FUNCTION_ARG_PADDING (TYPE_MODE (TREE_TYPE (pval)), const0_rtx))
5057 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
5058 / (PARM_BOUNDARY / BITS_PER_UNIT))
5059 * (PARM_BOUNDARY / BITS_PER_UNIT));
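/* Worked example, not from the original source: a 2-byte scalar with
   PUSH_ROUNDING (2) of 2 and a 32-bit PARM_BOUNDARY gives SIZE of 2 and
   USED of 4; the difference of 2 is handed to emit_push_insn below as
   extra space, keeping the stack on the argument boundary.  */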
5061 tem = arg->value;
5062 if (tem == 0)
5064 tem = expand_expr (pval, 0, VOIDmode, 0);
5065 /* ANSI doesn't require a sequence point here,
5066 but PCC has one, so this will avoid some problems. */
5067 emit_queue ();
5070 /* Don't allow anything left on stack from computation
5071 of argument to alloca. */
5072 if (may_be_alloca)
5073 do_pending_stack_adjust ();
5075 emit_push_insn (tem, TYPE_MODE (TREE_TYPE (pval)), 0, 0,
5076 arg->partial, arg->reg, used - size,
5077 argblock, ARGS_SIZE_RTX (arg->offset));
5079 else if (arg->stack != 0)
5081 /* BLKmode parm, not entirely passed in registers,
5082 and with space already allocated. */
5084 tree sizetree = size_in_bytes (TREE_TYPE (pval));
5086 /* Find out if the parm needs padding, and whether above or below. */
5087 enum direction where_pad
5088 = FUNCTION_ARG_PADDING (TYPE_MODE (TREE_TYPE (pval)),
5089 expand_expr (sizetree, 0, VOIDmode, 0));
5091 /* If it is padded below, adjust the stack address
5092 upward over the padding. */
5094 if (where_pad == downward)
5096 rtx offset_rtx;
5097 rtx address = XEXP (arg->stack, 0);
5098 struct args_size stack_offset;
5099 /* Hack for C++: see assign_parms for symmetric code. */
5100 int extra = 0;
5102 stack_offset.constant = 0;
5103 stack_offset.var = 0;
5105 if (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT != PARM_BOUNDARY)
5107 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5108 tree s1 = convert_units (sizetree, BITS_PER_UNIT, PARM_BOUNDARY);
5109 tree s2 = convert_units (s1, PARM_BOUNDARY, BITS_PER_UNIT);
5110 /* Compute amount of padding. */
5111 ADD_PARM_SIZE (stack_offset, s2);
5112 SUB_PARM_SIZE (stack_offset, sizetree);
5113 extra = stack_offset.constant % UNITS_PER_WORD;
5114 stack_offset.constant -= extra;
5116 offset_rtx = ARGS_SIZE_RTX (stack_offset);
5118 /* If there is rounding to do for a BLKmode parameter,
5119 add it in here, since STACK_OFFSET is not used for the
5120 rest of this iteration. */
5121 stack_offset.constant += extra;
5123 /* Adjust the address to store at. */
5124 if (GET_CODE (offset_rtx) == CONST_INT)
5125 address = plus_constant (address, INTVAL (offset_rtx));
5126 else
5128 address = gen_rtx (PLUS, Pmode, address, offset_rtx);
5129 address = memory_address (QImode, address);
5131 arg->stack = change_address (arg->stack, VOIDmode, address);
5134 /* ARG->stack probably refers to the stack-pointer. If so,
5135 stabilize it, in case stack-pointer changes during evaluation. */
5136 if (reg_mentioned_p (stack_pointer_rtx, arg->stack))
5137 arg->stack = change_address (arg->stack, VOIDmode,
5138 copy_to_reg (XEXP (arg->stack, 0)));
5139 /* BLKmode argument that should go in a prespecified stack location. */
5140 if (arg->value == 0)
5141 /* Not yet computed => compute it there. */
5142 /* ??? This should be changed to tell expand_expr
5143 that it can store directly in the target. */
5144 arg->value = store_expr (arg->tree_value, arg->stack, 0);
5145 else if (arg->value != arg->stack)
5146 /* It was computed somewhere, but not where we wanted.
5147 For example, the value may have come from an official
5148 local variable or parameter. In that case, expand_expr
5149 does not fill our suggested target. */
5150 emit_block_move (arg->stack, arg->value, ARGS_SIZE_RTX (arg->size),
5151 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT);
5153 /* Now, if this value wanted to be partly in registers,
5154 move the value from the stack to the registers
5155 that are supposed to hold the values. */
5156 if (arg->partial > 0)
5157 move_block_to_reg (REGNO (arg->reg), arg->stack, arg->partial);
5159 else
5161 /* BLKmode, at least partly to be pushed. */
5163 register rtx tem
5164 = arg->value ? arg->value : expand_expr (pval, 0, VOIDmode, 0);
5165 register int excess;
5166 rtx size_rtx;
5168 /* Pushing a nonscalar.
5169 If part is passed in registers, arg->partial says how much
5170 and emit_push_insn will take care of putting it there. */
5172 /* Round its size up to a multiple
5173 of the allocation unit for arguments. */
5175 if (arg->size.var != 0)
5177 excess = 0;
5178 size_rtx = ARGS_SIZE_RTX (arg->size);
5180 else
5182 register tree size = size_in_bytes (TREE_TYPE (pval));
5183 /* PUSH_ROUNDING has no effect on us, because
5184 emit_push_insn for BLKmode is careful to avoid it. */
5185 excess = (arg->size.constant - TREE_INT_CST_LOW (size)
5186 + arg->partial * UNITS_PER_WORD);
5187 size_rtx = expand_expr (size, 0, VOIDmode, 0);
5190 /* if (arg->stack) */
5191 /* abort (); */
5193 emit_push_insn (tem, TYPE_MODE (TREE_TYPE (pval)), size_rtx,
5194 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT,
5195 arg->partial, arg->reg, excess, argblock,
5196 ARGS_SIZE_RTX (arg->offset));
5199 /* Once we have pushed something, pops can't safely
5200 be deferred during the rest of the arguments. */
5201 NO_DEFER_POP;
5204 /* Expand conditional expressions. */
5206 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5207 LABEL is an rtx of code CODE_LABEL, in this function and all the
5208 functions here. */
5210 void
5211 jumpifnot (exp, label)
5212 tree exp;
5213 rtx label;
5215 do_jump (exp, label, 0);
5218 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5220 void
5221 jumpif (exp, label)
5222 tree exp;
5223 rtx label;
5225 do_jump (exp, 0, label);
5228 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5229 the result is zero, or IF_TRUE_LABEL if the result is one.
5230 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5231 meaning fall through in that case.
5233 This function is responsible for optimizing cases such as
5234 &&, || and comparison operators in EXP. */
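/* Illustrative example, not from the original source: for `if (a && b)',
   jumpifnot emits a jump-if-false on A straight to the false label, then
   a jump-if-false on B, so B is never evaluated when A is zero; `||'
   works symmetrically with jump-if-true, and TRUTH_NOT_EXPR merely swaps
   the two labels.  */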
5236 void
5237 do_jump (exp, if_false_label, if_true_label)
5238 tree exp;
5239 rtx if_false_label, if_true_label;
5241 register enum tree_code code = TREE_CODE (exp);
5242 /* Some cases need to create a label to jump to
5243 in order to properly fall through.
5244 These cases set DROP_THROUGH_LABEL nonzero. */
5245 rtx drop_through_label = 0;
5246 rtx temp;
5247 rtx comparison = 0;
5249 emit_queue ();
5251 switch (code)
5252 {
5253 case ERROR_MARK:
5254 break;
5256 case INTEGER_CST:
5257 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5258 if (temp)
5259 emit_jump (temp);
5260 break;
5262 case ADDR_EXPR:
5263 /* The address of something can never be zero. */
5264 if (if_true_label)
5265 emit_jump (if_true_label);
5266 break;
5268 case NOP_EXPR:
5269 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5270 break;
5272 case TRUTH_NOT_EXPR:
5273 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5274 break;
5276 case TRUTH_ANDIF_EXPR:
5277 if (if_false_label == 0)
5278 if_false_label = drop_through_label = gen_label_rtx ();
5279 do_jump (TREE_OPERAND (exp, 0), if_false_label, 0);
5280 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5281 break;
5283 case TRUTH_ORIF_EXPR:
5284 if (if_true_label == 0)
5285 if_true_label = drop_through_label = gen_label_rtx ();
5286 do_jump (TREE_OPERAND (exp, 0), 0, if_true_label);
5287 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5288 break;
5290 case COMPOUND_EXPR:
5291 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5292 emit_queue ();
5293 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5294 break;
5296 case COND_EXPR:
5297 {
5298 register rtx label1 = gen_label_rtx ();
5299 drop_through_label = gen_label_rtx ();
5300 do_jump (TREE_OPERAND (exp, 0), label1, 0);
5301 /* Now the THEN-expression. */
5302 do_jump (TREE_OPERAND (exp, 1),
5303 if_false_label ? if_false_label : drop_through_label,
5304 if_true_label ? if_true_label : drop_through_label);
5305 emit_label (label1);
5306 /* Now the ELSE-expression. */
5307 do_jump (TREE_OPERAND (exp, 2),
5308 if_false_label ? if_false_label : drop_through_label,
5309 if_true_label ? if_true_label : drop_through_label);
5310 }
5311 break;
5313 case EQ_EXPR:
5314 comparison = compare (exp, EQ, EQ, EQ, EQ);
5315 break;
5317 case NE_EXPR:
5318 comparison = compare (exp, NE, NE, NE, NE);
5319 break;
5321 case LT_EXPR:
5322 comparison = compare (exp, LT, LTU, GT, GTU);
5323 break;
5325 case LE_EXPR:
5326 comparison = compare (exp, LE, LEU, GE, GEU);
5327 break;
5329 case GT_EXPR:
5330 comparison = compare (exp, GT, GTU, LT, LTU);
5331 break;
5333 case GE_EXPR:
5334 comparison = compare (exp, GE, GEU, LE, LEU);
5335 break;
5337 default:
5338 temp = expand_expr (exp, 0, VOIDmode, 0);
5339 /* Copy to register to avoid generating bad insns by cse
5340 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
5341 if (!cse_not_expected && GET_CODE (temp) == MEM)
5342 temp = copy_to_reg (temp);
5343 do_pending_stack_adjust ();
5344 {
5345 rtx zero = CONST0_RTX (GET_MODE (temp));
5347 if (GET_CODE (temp) == CONST_INT)
5348 comparison = compare_constants (NE, 0,
5349 INTVAL (temp), 0, BITS_PER_WORD);
5350 else if (GET_MODE (temp) != VOIDmode)
5351 comparison = compare1 (temp, zero, NE, NE, 0, GET_MODE (temp));
5352 else
5353 abort ();
5354 }
5355 }
5357 /* Do any postincrements in the expression that was tested. */
5358 emit_queue ();
5360 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
5361 straight into a conditional jump instruction as the jump condition.
5362 Otherwise, all the work has been done already. */
5364 if (comparison == const1_rtx)
5366 if (if_true_label)
5367 emit_jump (if_true_label);
5369 else if (comparison == const0_rtx)
5371 if (if_false_label)
5372 emit_jump (if_false_label);
5374 else if (comparison)
5376 if (if_true_label)
5378 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
5379 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
5380 else
5381 abort ();
5383 if (if_false_label)
5384 emit_jump (if_false_label);
5386 else if (if_false_label)
5388 rtx pat;
5390 if (bcc_gen_fctn[(int) GET_CODE (comparison)] == 0)
5391 abort ();
5393 pat = (*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label);
5394 /* Now invert the sense of the jump by exchanging the two arms
5395 of each IF_THEN_ELSE. Note that inverting the condition
5396 would be incorrect for IEEE floating point with NaNs! */
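/* Illustrative note, not from the original source: with IEEE arithmetic,
   !(a < b) is not equivalent to a >= b, because both comparisons are
   false when either operand is a NaN.  Exchanging the branch arms changes
   where we go on "false" without changing which condition is tested, so
   it is safe where reversing the condition code would not be.  */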
5397 if (GET_CODE (pat) == SEQUENCE)
5399 int i;
5400 /* We can invert a sequence if the only jump is at the end. */
5401 for (i = 0; i < (int) (XVECLEN (pat, 0) - 1); i++)
5402 if (GET_CODE (XVECEXP (pat, 0, i)) == JUMP_INSN)
5403 abort ();
5404 invert_exp (PATTERN (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)),
5405 0, 0);
5407 else
5408 invert_exp (pat, 0, 0);
5410 emit_jump_insn (pat);
5414 if (drop_through_label)
5415 emit_label (drop_through_label);
5418 /* Compare two integer constant rtx's, OP0 and OP1.
5419 The comparison operation is OPERATION.
5420 Return an rtx representing the value 1 or 0.
5421 WIDTH is the width in bits that is significant. */
5423 static rtx
5424 compare_constants (operation, unsignedp, op0, op1, width)
5425 enum rtx_code operation;
5426 int unsignedp;
5427 int op0, op1;
5428 int width;
5429 {
5430 int val;
5432 /* Sign-extend or zero-extend the operands to a full word
5433 from an initial width of WIDTH bits. */
5434 if (width < HOST_BITS_PER_INT)
5435 {
5436 op0 &= (1 << width) - 1;
5437 op1 &= (1 << width) - 1;
5439 if (! unsignedp)
5441 if (op0 & (1 << (width - 1)))
5442 op0 |= ((-1) << width);
5443 if (op1 & (1 << (width - 1)))
5444 op1 |= ((-1) << width);
5448 switch (operation)
5450 case EQ:
5451 val = op0 == op1;
5452 break;
5454 case NE:
5455 val = op0 != op1;
5456 break;
5458 case GT:
5459 case GTU:
5460 val = op0 > op1;
5461 break;
5463 case LT:
5464 case LTU:
5465 val = op0 < op1;
5466 break;
5468 case GE:
5469 case GEU:
5470 val = op0 >= op1;
5471 break;
5473 case LE:
5474 case LEU:
5475 val = op0 <= op1;
5478 return val ? const1_rtx : const0_rtx;
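
/* A worked example, assuming a host where HOST_BITS_PER_INT is 32:

	compare_constants (LT, 0, 0xff, 0, 8)

   masks OP0 to 0xff; bit 7 is set, so the sign-extension step computes
   op0 |= ((-1) << 8), giving -1, and -1 < 0 yields const1_rtx.  With
   UNSIGNEDP nonzero the extension is skipped, 255 < 0 is false, and
   the result is const0_rtx.  */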
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_FORWARD should be the rtx operation for this comparison for
   signed data; UNSIGNED_FORWARD, likewise for use if data is unsigned.
   SIGNED_REVERSE and UNSIGNED_REVERSE are used if it is desirable
   to interchange the operands for the compare instruction.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_forward, unsigned_forward,
	 signed_reverse, unsigned_reverse)
     register tree exp;
     enum rtx_code signed_forward, unsigned_forward;
     enum rtx_code signed_reverse, unsigned_reverse;
{
  register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  register enum machine_mode mode = GET_MODE (op0);
  int unsignedp;

  /* If one operand is 0, make it the second one.  */

  if (op0 == const0_rtx
      || (GET_MODE_CLASS (mode) == MODE_FLOAT && op0 == CONST0_RTX (mode)))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      signed_forward = signed_reverse;
      unsigned_forward = unsigned_reverse;
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  unsignedp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
	       || TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))));

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return compare_constants (signed_forward, unsignedp,
			      INTVAL (op0), INTVAL (op1),
			      GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));

  emit_cmp_insn (op0, op1,
		 (mode == BLKmode) ? expr_size (TREE_OPERAND (exp, 0)) : 0,
		 unsignedp,
		 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

  return gen_rtx ((unsignedp ? unsigned_forward : signed_forward),
		  VOIDmode, cc0_rtx, const0_rtx);
}
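
/* Usage sketch: when both operands fold to constants, as in 2 < 3,
   compare () emits no insns at all and returns const1_rtx via
   compare_constants.  Otherwise it emits a compare insn and returns
   an rtx such as (lt (cc0) (const_int 0)) for the jump and
   store-flag emitters to consume.  */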
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.
   BLKmode is not allowed.  */

static rtx
compare1 (op0, op1, forward_op, reverse_op, unsignedp, mode)
     register rtx op0, op1;
     enum rtx_code forward_op, reverse_op;
     int unsignedp;
     enum machine_mode mode;
{
  /* If one operand is 0, make it the second one.  */

  if (op0 == const0_rtx
      || (GET_MODE_CLASS (mode) == MODE_FLOAT && op0 == CONST0_RTX (mode)))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      forward_op = reverse_op;
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return compare_constants (forward_op, unsignedp,
			      INTVAL (op0), INTVAL (op1),
			      GET_MODE_BITSIZE (mode));

  emit_cmp_insn (op0, op1, 0, unsignedp, 0);

  return gen_rtx (forward_op, VOIDmode, cc0_rtx, const0_rtx);
}
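
/* For example, do_jump's default case tests an arbitrary value for
   truth with

	compare1 (temp, zero, NE, NE, 0, GET_MODE (temp));

   which compares TEMP against zero and returns
   (ne (cc0) (const_int 0)).  */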
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.
   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.  */

static rtx
do_store_flag (exp, target, mode)
     tree exp;
     rtx target;
     enum machine_mode mode;
{
  register enum tree_code code = TREE_CODE (exp);
  register rtx comparison = 0;
  enum machine_mode compare_mode;
  rtx prev_insn = get_last_insn ();
  enum insn_code icode;

  switch (code)
    {
#ifdef HAVE_seq
    case EQ_EXPR:
      if (HAVE_seq)
	{
	  comparison = compare (exp, EQ, EQ, EQ, EQ);
	  icode = CODE_FOR_seq;
	  compare_mode = insn_operand_mode[(int) CODE_FOR_seq][0];
	}
      break;
#endif

#ifdef HAVE_sne
    case NE_EXPR:
      if (HAVE_sne)
	{
	  comparison = compare (exp, NE, NE, NE, NE);
	  icode = CODE_FOR_sne;
	  compare_mode = insn_operand_mode[(int) CODE_FOR_sne][0];
	}
      break;
#endif

#if defined (HAVE_slt) && defined (HAVE_sltu) && defined (HAVE_sgt) && defined (HAVE_sgtu)
    case LT_EXPR:
      if (HAVE_slt && HAVE_sltu && HAVE_sgt && HAVE_sgtu)
	{
	  comparison = compare (exp, LT, LTU, GT, GTU);
	  icode = CODE_FOR_slt;
	  compare_mode = insn_operand_mode[(int) CODE_FOR_slt][0];
	}
      break;

    case GT_EXPR:
      if (HAVE_slt && HAVE_sltu && HAVE_sgt && HAVE_sgtu)
	{
	  comparison = compare (exp, GT, GTU, LT, LTU);
	  icode = CODE_FOR_slt;
	  compare_mode = insn_operand_mode[(int) CODE_FOR_slt][0];
	}
      break;
#endif

#if defined (HAVE_sle) && defined (HAVE_sleu) && defined (HAVE_sge) && defined (HAVE_sgeu)
    case LE_EXPR:
      if (HAVE_sle && HAVE_sleu && HAVE_sge && HAVE_sgeu)
	{
	  comparison = compare (exp, LE, LEU, GE, GEU);
	  icode = CODE_FOR_sle;
	  compare_mode = insn_operand_mode[(int) CODE_FOR_sle][0];
	}
      break;

    case GE_EXPR:
      if (HAVE_sle && HAVE_sleu && HAVE_sge && HAVE_sgeu)
	{
	  comparison = compare (exp, GE, GEU, LE, LEU);
	  icode = CODE_FOR_sle;
	  compare_mode = insn_operand_mode[(int) CODE_FOR_sle][0];
	}
      break;
#endif
    }

  if (comparison == 0)
    return 0;

  if (target == 0 || GET_MODE (target) != mode
      /* Don't use specified target unless the insn can handle it.  */
      || ! (*insn_operand_predicate[(int) icode][0]) (target, mode)
      /* When modes don't match, don't use specified target,
	 because it might be the same as an operand,
	 and then the CLOBBER output below would screw up.  */
      || (mode != compare_mode && GET_CODE (comparison) != CONST_INT))
    target = gen_reg_rtx (mode);

  /* Store the comparison in its proper mode.  */
  if (GET_CODE (comparison) == CONST_INT)
    emit_move_insn (target, comparison);
  else if (GET_MODE (target) != compare_mode)
    {
      /* We want a different mode: store result in its natural mode.
	 Combine the mode conversion with the truncation we must do anyway.  */
      /* Put a CLOBBER before the compare, so we don't come between
	 the compare and the insn that uses the result.  */
      emit_insn_after (gen_rtx (CLOBBER, VOIDmode, target), prev_insn);
      emit_insn ((*setcc_gen_fctn[(int) GET_CODE (comparison)])
		 (gen_rtx (SUBREG, compare_mode, target, 0)));
      /* If the desired mode is wider than what we got,
	 use an AND to convert it, but not if we will do one anyway.  */
#if STORE_FLAG_VALUE == 1
      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (compare_mode))
	expand_bit_and (mode, target, const1_rtx, target);
#endif
    }
  else
    emit_insn ((*setcc_gen_fctn[(int) GET_CODE (comparison)]) (target));

#if STORE_FLAG_VALUE != 1
#if STORE_FLAG_VALUE & 1
  expand_bit_and (mode, target, const1_rtx, target);
#else
  expand_shift (RSHIFT_EXPR, mode, target,
		build_int_2 (GET_MODE_BITSIZE (mode) - 1, 0),
		target, TRUE);
#endif
#endif
  return target;
}
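
/* To sketch the STORE_FLAG_VALUE normalization above: on a hypothetical
   machine whose set-flag insns store -1 for true, -1 & 1 is 1, so the
   expand_bit_and branch suffices; on one that stores a value with only
   the sign bit set, the logical right shift by
   GET_MODE_BITSIZE (mode) - 1 brings that bit down to produce exactly
   1 or 0.  */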
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, range, table_label, default_label)
     rtx index, range, table_label, default_label;
{
  register rtx temp;

  emit_cmp_insn (range, index, 0, 0, 0);
  emit_jump_insn (gen_bltu (default_label));
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       gen_rtx (CONST_INT, VOIDmode,
				GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, VOIDmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  convert_move (temp, gen_rtx (MEM, CASE_VECTOR_MODE, index), 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
}
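
/* For example, with CASE_VECTOR_MODE == SImode the address computed
   above is table_label + index * 4, so convert_move fetches the 4-byte
   table entry and gen_tablejump dispatches through it.  Note also that
   the unsigned bltu test catches a negative INDEX, which compares as a
   huge unsigned value and so branches to DEFAULT_LABEL.  */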

#endif /* HAVE_tablejump */