/* Procedure integration for GNU CC.
   Copyright (C) 1988 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@mcc.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include <stdio.h>

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
extern char *xmalloc ();
extern void free ();

extern struct obstack permanent_obstack, maybepermanent_obstack;
extern struct obstack *rtl_obstack, *saveable_obstack, *current_obstack;

extern rtx stack_slot_list;

#define MIN(x,y) (((x) < (y)) ? (x) : (y))

extern tree pushdecl ();
extern tree poplevel ();
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL)) + 16 * TREE_INLINE (DECL)))
#endif
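
/* Worked example (illustrative numbers, not from the original source):
   for a function with 3 parameters that is not yet marked inline,
   TREE_INLINE is 0 and the default threshold is 8 * (8 + 3 + 0) = 88
   insns; once TREE_INLINE is set, the budget becomes
   8 * (8 + 3 + 16) = 216 insns.  */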
/* This is the target of the inline function being expanded,
   or NULL if there is none.  */
static rtx inline_target;

/* We must take special care not to disrupt life too severely
   when performing procedure integration.  One thing this
   involves is not creating illegitimate addresses which reload
   cannot fix.  Since we don't know what the frame pointer is
   not capable of (in a machine-independent way), we create
   a pseudo-frame pointer which will have to do for now.  */
static rtx before_inline_fp_rtx, inline_fp_rtx;
/* Convert old frame-pointer offsets to new.  Parameters which only
   produce values (no addresses, and are never assigned), map directly
   to the pseudo-reg of the incoming value.  Parameters that are
   assigned to but do not have their address taken are given a fresh
   pseudo-register.  Parameters that have their address taken are
   given a fresh stack-slot.  */
static rtx *parm_map;
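
/* Illustrative example (hypothetical offsets): a parm whose home in the
   inlined function was (mem (plus (arg_pointer) (const_int 8))) gets an
   entry at parm_map[8 / UNITS_PER_WORD]: the incoming value's pseudo-reg
   if it is only read, a fresh pseudo-register if it is assigned, or a
   fresh stack slot if its address is taken.  */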
/* ?? Should this be done here??  It is not right now.
   Keep track of whether a given pseudo-register is the sum
   of the frame pointer and a const_int (or zero).  */
static char *fp_addr_p;
/* For the local variables of the procedure being integrated that live
   on the frame, FP_DELTA says how much to change their
   offsets by, so that they now live in the correct place on the
   frame of the function being compiled.  */
static int fp_delta;
/* When an insn is being copied by copy_rtx_and_substitute,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.
   Likewise in copy_for_inline.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_rtx_and_substitute,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.
   Likewise in copy_for_inline.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Return a copy of an rtx (as needed), substituting pseudo-registers,
   labels, and frame-pointer offsets as necessary.  */
static rtx copy_rtx_and_substitute ();

/* Variant, used for memory addresses that are not memory_address_p.  */
static rtx copy_address ();

/* Return the rtx corresponding to a given index in the stack arguments.  */
static rtx access_parm_map ();

static void copy_parm_decls ();
static void copy_decl_tree ();

static rtx try_fold_cc0 ();

/* We do some simple constant folding optimization.  This optimization
   really exists primarily to save time inlining a function.  It
   also helps users who ask for inline functions without -O.  */
static rtx fold_out_const_cc0 ();
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return "varargs function cannot be inline";

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 4 * max_insns)
    return "function too large to be inline";

  /* Don't inline functions with large stack usage,
     since they can make other recursive functions burn up stack.  */
  if (!TREE_INLINE (fndecl) && get_frame_size () > 100)
    return "function stack frame too large for inlining";

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
#ifdef PCC_STATIC_STRUCT_RETURN
  if (flag_pcc_struct_return
      && (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
          || RETURN_IN_MEMORY (TREE_TYPE (TREE_TYPE (fndecl)))))
    return "inline functions not supported for this return value type";
#endif

  /* Don't inline functions which have BLKmode arguments.
     Don't inline functions that take the address of
     a parameter and do not specify a function prototype.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
#if 0
        return "function with large aggregate parameter cannot be inline";
#else
        TREE_ADDRESSABLE (parms) = 1;
#endif
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
#if 0
      /* If an aggregate is thought of as "in memory"
         then its components are referred to by narrower memory refs.
         If the actual parameter is a reg, these refs can't be translated,
         esp. since copy_rtx_and_substitute doesn't know whether it is
         reading or writing.  */
      if ((TREE_CODE (TREE_TYPE (parms)) == RECORD_TYPE
           || TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE)
          && GET_CODE (DECL_RTL (parms)) == MEM)
        return "address of an aggregate parameter is used; cannot be inline";
#endif
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == INSN
            || GET_CODE (insn) == JUMP_INSN
            || GET_CODE (insn) == CALL_INSN)
          ninsns++;

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  return 0;
}
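
/* Usage sketch (illustrative, not from the original source): a caller
   is expected to test the return value and, when it is nonzero, use it
   as a warning format string, e.g.

     char *err = function_cannot_inline_p (fndecl);
     if (err)
       warning_with_decl (fndecl, err);

   `warning_with_decl' here stands in for whatever diagnostic routine
   the caller actually uses.  */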
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Map pseudo reg number to equivalent constant.  We cannot in general
   substitute constants into parameter pseudo registers, since some
   machine descriptions (the Sparc md, maybe others) won't always handle
   the resulting insns.  So if an incoming parameter has a constant
   equivalent, we record it here, and if the resulting insn is
   recognizable, we go with it.  */
static rtx *const_equiv_map;
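
/* Illustrative scenario (hypothetical register numbers): if parameter
   pseudo 60 is passed the constant 5, const_equiv_map[60] is set to
   (const_int 5).  When a copied insn that used reg 60 still matches the
   machine description after (const_int 5) is substituted, the constant
   version is kept; otherwise the copy is re-emitted without the
   substitution.  */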
/* Nonzero if we should try using a constant equivalent.
   Set to zero if a constant equivalent resulted in an insn which could
   not be recognized.  */
static int try_use_const;

/* Use "iteration numbering" to speedily pull constant values
   from registers when testing conditionals.  */
static unsigned int *const_age_map, const_age;

/* Cleared before attempting to inline any functions.
   Set when a const equiv is used.  Used to test whether an insn
   is safe for the md or not.  */
static int used_const_equiv;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* Offset from arg ptr to the first parm of this inline function.  */
static int first_parm_offset;

/* On machines that perform a function return with a single
   instruction, such as the VAX, these return insns must be
   mapped into branch statements.  */
extern rtx return_label;

/* Save any constant pool constants in an insn.  */
static void save_constants ();

/* Note when parameter registers are the destination of a SET.  */
static void note_modified_parmregs ();

/* Copy an rtx for save_for_inline.  */
static rtx copy_for_inline ();

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.  */
void
save_for_inline (fndecl)
     tree fndecl;
{
  extern rtx *regno_reg_rtx;	/* in emit-rtl.c.  */
  extern int current_function_args_size;

  rtx first_insn, last_insn, insn;
  rtx head, copy;
  tree parms;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_parm_reg = max_parm_reg_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.

     Set TREE_VOLATILE to 0 if the parm is in a register, otherwise 1.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
  bzero (parmdecl_map, max_parm_reg * sizeof (tree));

  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == REG)
        {
          parmdecl_map[REGNO (p)] = parms;
          TREE_VOLATILE (parms) = 0;
        }
      else
        TREE_VOLATILE (parms) = 1;
      TREE_READONLY (parms) = 1;
    }

  /* Replace any constant pool references with the actual constant.  We will
     put the constant back in the copy made below.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      save_constants (PATTERN (insn));

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers),
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the stack slot list,
     @@ not yet: and some flags that are used to restore compiler globals.  */

  head = gen_inline_header_rtx (NULL, NULL, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size, stack_slot_list);
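  /* Descriptive note (added for clarity): the fields stored in this
     header are read back later through accessor macros such as
     FIRST_FUNCTION_INSN, FIRST_PARM_INSN, FIRST_LABELNO, LAST_LABELNO,
     MAX_REGNUM and FUNCTION_ARGS_SIZE, as used by
     `expand_inline_function' below.  */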
  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (&maybepermanent_obstack,
				     regno_reg_rtx[i], len);
  bcopy (reg_map + FIRST_PSEUDO_REGISTER,
	 regno_reg_rtx + FIRST_PSEUDO_REGISTER,
	 (max_reg - FIRST_PSEUDO_REGISTER) * sizeof (rtx));
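  /* Descriptive note (added for clarity): bcopy's argument order is
     (from, to, length), so this copies the new rtx pointers out of
     reg_map into regno_reg_rtx, making the copies the official
     pseudo-reg rtx's from here on.  */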
  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero (insn_map, max_uid * sizeof (rtx));

  in_nonparm_insns = 0;
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now copy the chain of insns.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;
      copy_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* No need to keep these.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	    continue;

	  copy = rtx_alloc (NOTE);
	  NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
	  break;

	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = rtx_alloc (GET_CODE (insn));
	  PATTERN (copy) = copy_for_inline (PATTERN (insn));
	  INSN_CODE (copy) = -1;
	  LOG_LINKS (copy) = NULL;
	  REG_NOTES (copy) = copy_for_inline (REG_NOTES (insn));
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case CODE_LABEL:
	  copy = label_map[CODE_LABEL_NUMBER (insn)];
	  break;

	case BARRIER:
	  copy = rtx_alloc (BARRIER);
	  break;

	default:
	  abort ();
	}
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  NEXT_INSN (last_insn) = NULL;

  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
  TREE_INLINE (fndecl) = 1;

  parmdecl_map = 0;
  label_map = 0;
  reg_map = 0;
  return_label = 0;

  set_new_first_and_last_insn (first_insn, last_insn);

  /* The following code does not need preprocessing in the assembler.  */

  app_disable ();

  output_constant_pool (XSTR (XEXP (DECL_RTL (fndecl), 0), 0), fndecl);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   Other version.  */
void
save_for_outline (fndecl)
     tree fndecl;
{
  extern rtx *regno_reg_rtx;	/* in emit-rtl.c.  */
  extern int current_function_args_size;

  rtx first_insn, last_insn, insn;
  rtx head, copy;
  tree parms;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_parm_reg = max_parm_reg_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.

     Set TREE_VOLATILE to 0 if the parm is in a register, otherwise 1.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
  bzero (parmdecl_map, max_parm_reg * sizeof (tree));

  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == REG)
        {
          parmdecl_map[REGNO (p)] = parms;
          TREE_VOLATILE (parms) = 0;
        }
      else
        TREE_VOLATILE (parms) = 1;
      TREE_READONLY (parms) = 1;
    }

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers),
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the stack slot list,
     @@ not yet: and some flags that are used to restore compiler globals.  */

  head = gen_inline_header_rtx (NULL, NULL, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size, stack_slot_list);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  in_nonparm_insns = 0;
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now copy the chain of insns.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;
      copy_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  note_modified_parmregs (PATTERN (insn));
	  break;

	case NOTE:
	case CODE_LABEL:
	case BARRIER:
	  break;

	default:
	  abort ();
	}
      last_insn = insn;
    }

  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
  TREE_INLINE (fndecl) = 1;

  /* Have to output these, since other functions may refer to them.  */

  /* The following code does not need preprocessing in the assembler.  */

  app_disable ();

  output_constant_pool (XSTR (XEXP (DECL_RTL (fndecl), 0), 0), fndecl);
}
/* References to the constant pool are replaced by the actual constant
   encapsulated with a CONST giving the mode and with RTX_INTEGRATED_P set.

   *** Note that the above only works if the address was not manipulated.
   If the address was not valid and had to be loaded into a register,
   we lose track of the fact that it was in the constant pool, which will
   result in either an abort or generating a reference to an undefined
   label in the assembler code.  No current machine will run into this, but
   this should probably be fixed someday.  */
static void
save_constants (x)
     rtx x;
{
  int i, j;
  char *fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
    {
      switch (*fmt++)
	{
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (GET_CODE (XVECEXP (x, i, j)) == MEM
		&& GET_CODE (XEXP (XVECEXP (x, i, j), 0)) == SYMBOL_REF
		&& CONSTANT_POOL_ADDRESS_P (XEXP (XVECEXP (x, i, j), 0)))
	      {
		XVECEXP (x, i, j) =
		  gen_rtx (CONST, get_pool_mode (XEXP (XVECEXP (x, i, j), 0)),
			   get_pool_constant (XEXP (XVECEXP (x, i, j), 0)));
		RTX_INTEGRATED_P (XVECEXP (x, i, j)) = 1;
	      }
	    else
	      save_constants (XVECEXP (x, i, j));
	  break;

	case 'e':
	  if (GET_CODE (XEXP (x, i)) == MEM
	      && GET_CODE (XEXP (XEXP (x, i), 0)) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (x, i), 0)))
	    {
	      XEXP (x, i) = gen_rtx (CONST,
				     get_pool_mode (XEXP (XEXP (x, i), 0)),
				     get_pool_constant (XEXP (XEXP (x, i), 0)));
	      RTX_INTEGRATED_P (XEXP (x, i)) = 1;
	    }
	  else
	    save_constants (XEXP (x, i));
	  break;
	}
    }
}
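
/* Illustrative before/after (hypothetical pool label): a pool reference
   such as

     (mem (symbol_ref ".LC0"))

   whose pool entry holds, say, a DFmode constant is rewritten as

     (const:DF <pool constant>)

   with RTX_INTEGRATED_P set, so that copy_for_inline and
   copy_rtx_and_substitute can later call force_const_mem to rebuild a
   pool reference in the new function.  */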
/* Note (recursively) whether a parameter is modified or not.  */

static void
note_modified_parmregs (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i, len;
  register enum rtx_code code;
  register char *format_ptr;

 again:

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  if (code == SET && in_nonparm_insns)
    {
      rtx dest = SET_DEST (x);

      if (GET_CODE (dest) == REG
	  && REGNO (dest) < max_parm_reg
	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	  && parmdecl_map[REGNO (dest)] != 0)
	TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      return;
    }

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);
  len = GET_RTX_LENGTH (code);

  if (len > 2)
    for (i = len - 1; i > 1; i--)
      if (format_ptr[i] == 'e')
	note_modified_parmregs (XEXP (x, i));
      else if (format_ptr[i] == 'E')
	goto hard;

  if (len > 1)
    if (format_ptr[1] == 'e')
      note_modified_parmregs (XEXP (x, 1));
    else if (format_ptr[i = 1] == 'E')
      goto hard;

  if (len > 0)
    if (format_ptr[0] == 'e')
      {
	x = XEXP (x, 0);
	goto again;
      }
    else if (format_ptr[i = 0] == 'E')
      goto hard;

  return;
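
  /* Descriptive note (added for clarity): the `i = 1' and `i = 0'
     assignments embedded in the subscripts above record which operand
     the quick scan stopped at, so the `hard' loop below can resume at
     that operand and walk any vector ('E') operands it finds.  */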
  i = len - 1;

 hard:

  while (i >= 0)
    {
      switch (format_ptr[i])
	{
	case 'e':
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto again;
	    }
	  note_modified_parmregs (XEXP (x, i));
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      for (j = 0; j < XVECLEN (x, i); j++)
		note_modified_parmregs (XVECEXP (x, i, j));
	    }
	  break;
	}
      i -= 1;
    }
}
/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.

   If we find a saved constant pool entry, replace it with the constant.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */
static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
	return force_const_mem (GET_MODE (x), XEXP (x, 0));
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	return x;
#if 0 /* This is turned off because it is possible for
	 unshare_all_rtl to copy the address, into memory that won't be saved.
	 Although the MEM can safely be shared, and won't be copied there,
	 the address itself cannot be shared, and may need to be copied.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
	  && (REGNO (XEXP (XEXP (x, 0), 0)) == FRAME_POINTER_REGNUM
	      || REGNO (XEXP (XEXP (x, 0), 0)) == ARG_POINTER_REGNUM)
	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
#if 0
	/* This statement was accidentally deleted in the remote past.
	   Reinsert it for 1.37.  Don't take the risk now.  */
	return x;
#endif
      if (GET_CODE (XEXP (x, 0)) == REG
	  && (REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
	      || REGNO (XEXP (x, 0)) == ARG_POINTER_REGNUM)
	  && CONSTANT_ADDRESS_P (XEXP (x, 1)))
	return x;
#endif /* 0 */
      break;

    case LABEL_REF:
      /* Must point to the new insn.  */
      return gen_rtx (LABEL_REF, GET_MODE (orig),
		      label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);

    case REG:
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
	return reg_map[REGNO (x)];
      else
	return x;

      /* If a parm that gets modified lives in a pseudo-reg,
	 set its TREE_VOLATILE to prevent certain optimizations.  */
    case SET:
      if (in_nonparm_insns)
	{
	  rtx dest = SET_DEST (x);

	  if (GET_CODE (dest) == REG
	      && REGNO (dest) < max_parm_reg
	      && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	      && parmdecl_map[REGNO (dest)] != 0)
	    TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
	}
      /* The insn to load an arg pseudo from a stack slot
	 does not count as modifying it.  */
      break;

      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
		   + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  return insn_map[INSN_UID (XEXP (x, i))];

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j) = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
/* Helper function to deal with using constants in various kinds of insns.
   Return zero if trouble arose by using constants.
   Return one if not.  Caller must know what to do in either case.  */

static int
note_integrated (copy)
     rtx copy;
{
  if (used_const_equiv)
    {
      used_const_equiv = 0;
      if (recog (PATTERN (copy), copy) < 0)
	{
	  int old_optimize = optimize;
	  optimize = 1;
	  delete_insn (copy);
	  optimize = old_optimize;
	  try_use_const = 0;
	  return 0;
	}
    }
  try_use_const = 1;
  RTX_INTEGRATED_P (copy) = 1;
  return 1;
}
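
/* Caller contract, illustrated (this matches the uses in
   expand_inline_function below): each emitted copy is checked, and on
   failure the insn is re-emitted without constant substitution:

     copy = emit_insn (copy_rtx_and_substitute (pattern));
     if (! note_integrated (copy))
       {
	 next = 0;
	 goto retry;
       }
  */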
/* Nonzero if we are trying to reduce the amount of debug information
   output.  */
extern int flag_inline_debug;

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  extern int lineno;
  tree formal, actual;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx insn;
  int max_regno = MAX_REGNUM (header) + 1;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx *arg_vec;
  rtx local_return_label = 0;
  rtx follows_call = 0;
  rtx this_struct_value_rtx = 0;

  /* Hack around non-reentrancy of static variables.  */
  rtx *old_const_equiv_map = const_equiv_map;
  unsigned *old_const_age_map = const_age_map;
  unsigned old_const_age = const_age;

  /* If we need INLINE_FP_RTX, set it up immediately
     following this insn.  */

  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* We expect PARMS to have the right length; don't crash if not.  */
  if (list_length (parms) != nargs)
    return (rtx) -1;

  /* Also check that the parms' types match.  Since the appropriate
     conversions or default promotions have already been applied,
     the machine modes should match exactly.  */
  for (formal = DECL_ARGUMENTS (fndecl),
       actual = parms;
       formal;
       formal = TREE_CHAIN (formal),
       actual = TREE_CHAIN (actual))
    {
      tree arg = TREE_VALUE (actual);
      enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
      if (mode != TYPE_MODE (TREE_TYPE (arg)))
	return (rtx) -1;
      /* If they are block mode, the types should match exactly.
	 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	 which could happen if the parameter has incomplete type.  */
      if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
	return (rtx) -1;
    }

  const_equiv_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (const_equiv_map, max_regno * sizeof (rtx));
  const_age_map = (unsigned *) alloca (max_regno * sizeof (unsigned));
  bzero (const_age_map, max_regno * sizeof (unsigned));
  try_use_const = 1;
  /* Trick: set to a large number so that variables set in the first
     basic block keep their values.  After the first label is seen,
     we wrap.  */
  const_age = (unsigned) -1;
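  /* Descriptive note (added for clarity): const_age starts odd, so the
     first CODE_LABEL below (const_age += 2, still odd) triggers the
     wrap-around fixup that bumps const_age to an even value and clears
     map entries recorded with age (unsigned) -1.  Parameter
     equivalences are recorded with age (unsigned) -2 and so survive;
     copy_rtx_and_substitute checks for exactly that value.  */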
  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);

  /* Get all the actual args as RTL, and store them in ARG_VEC.  */

  arg_vec = (rtx *) alloca (nargs * sizeof (rtx));

  if (flag_inline_debug)
    /* Say where this function starts.  */
    emit_note (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));

  for (formal = DECL_ARGUMENTS (fndecl),
       actual = parms,
       i = 0;
       formal;
       formal = TREE_CHAIN (formal),
       actual = TREE_CHAIN (actual),
       i++)
    {
      /* Actual parameter, already converted to DECL_ARG_TYPE (formal).  */
      tree arg = TREE_VALUE (actual);
      /* Mode of the value supplied.  */
      enum machine_mode tmode = TYPE_MODE (DECL_ARG_TYPE (formal));
      /* Mode of the variable used within the function.  */
      enum machine_mode imode = TYPE_MODE (TREE_TYPE (formal));
      rtx copy;

#if 0
      /* PARM_DECL nodes no longer have this.  */
      emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
#endif

      /* Make a place to hold the argument value, still in mode TMODE,
	 and put it in COPY.  */
      if (TREE_ADDRESSABLE (formal))
	{
	  int size = int_size_in_bytes (DECL_ARG_TYPE (formal));
	  copy = assign_stack_local (tmode, size);
	  if (!memory_address_p (DECL_MODE (formal), XEXP (copy, 0)))
	    copy = change_address (copy, VOIDmode, copy_rtx (XEXP (copy, 0)));
	  store_expr (arg, copy, 0);
	}
      else if (! TREE_READONLY (formal)
	       || TREE_VOLATILE (formal))
	{
	  /* If parm is modified or if it hasn't a pseudo reg,
	     we may not simply substitute the actual value;
	     copy it through a register.  */
	  copy = gen_reg_rtx (tmode);
	  store_expr (arg, copy, 0);
	}
      else
	{
	  copy = expand_expr (arg, 0, tmode, 0);

	  /* We do not use CONSTANT_ADDRESS_P here because
	     the set of cases where that might make a difference
	     is a subset of the cases that arise even when
	     it is a CONSTANT_ADDRESS_P (i.e., fp_delta
	     gets into the act).  */
	  if (GET_CODE (copy) != REG)
	    {
#if 0
	      if (! CONSTANT_P (copy))
		copy = copy_to_reg (copy);
	      else if (! optimize)
		copy = copy_to_mode_reg (imode, copy);
#else
	      /* Sigh.  */
	      if (! CONSTANT_P (copy))
		copy = copy_to_reg (copy);
	      else
		{
		  if (GET_CODE (DECL_RTL (formal)) == REG)
		    {
		      int regno = REGNO (DECL_RTL (formal));
		      const_equiv_map[regno] = copy;
		      const_age_map[regno] = (unsigned) -2;
		    }
		  copy = copy_to_mode_reg (imode, copy);
		}
#endif
	    }
	}
      /* If passed mode != nominal mode, COPY is now the passed mode.
	 Convert it to the nominal mode (i.e. truncate it).  */
      if (tmode != imode)
	copy = convert_to_mode (imode, copy, 0);
      arg_vec[i] = copy;
    }

  copy_parm_decls (DECL_ARGUMENTS (fndecl), arg_vec);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Pass the function the address in which to return a structure value.
     Note that a constructor can cause someone to call us with
     STRUCTURE_VALUE_ADDR, but the initialization takes place
     via the first parameter, rather than the struct return address.  */
  if (structure_value_addr && aggregate_value_p (DECL_RESULT (fndecl)))
    {
      if (GET_CODE (structure_value_addr) == REG
	  && (struct_value_rtx == 0 || GET_CODE (struct_value_rtx) == MEM))
	this_struct_value_rtx = structure_value_addr;
      else
	this_struct_value_rtx = copy_to_mode_reg (Pmode, structure_value_addr);
    }

  /* Now prepare for copying the insns.
     Set up reg_map, parm_map and label_map saying how to translate
     the pseudo-registers, stack-parm references and labels when copying.  */

  reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_map, max_regno * sizeof (rtx));

  parm_map = (rtx *) alloca ((FUNCTION_ARGS_SIZE (header) + UNITS_PER_WORD - 1)
			     / UNITS_PER_WORD * sizeof (rtx));
  bzero (parm_map, ((FUNCTION_ARGS_SIZE (header) + UNITS_PER_WORD - 1)
		    / UNITS_PER_WORD * sizeof (rtx)));

  /* Note that expand_expr (called above) can clobber first_parm_offset.  */
  first_parm_offset = FIRST_PARM_OFFSET (fndecl);
  parm_map -= first_parm_offset / UNITS_PER_WORD;
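  /* Worked example (hypothetical numbers): with UNITS_PER_WORD == 4 and
     first_parm_offset == 8, the bias above shifts the base by 2 slots,
     so an argument at arg-pointer offset 12 is found directly at
     parm_map[12 / 4], i.e. the code below can always index parm_map by
     offset / UNITS_PER_WORD.  */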
  if (DECL_ARGUMENTS (fndecl))
    {
      tree decl = DECL_ARGUMENTS (fndecl);

      for (formal = decl, i = 0; formal; formal = TREE_CHAIN (formal), i++)
	{
	  /* Create an entry in PARM_MAP that says what pseudo register
	     is associated with an address we might compute.  */
	  if (DECL_OFFSET (formal) >= 0)
	    {
	      /* This parameter has a home in the stack.  */
	      parm_map[DECL_OFFSET (formal) / BITS_PER_WORD] = arg_vec[i];
	    }
	  else
	    {
	      /* Parameter that was passed in a register;
		 does it have a home on the stack (as a local)?  */
	      rtx frtx = DECL_RTL (formal);
	      rtx offset = 0;
	      if (GET_CODE (frtx) == MEM)
		{
		  frtx = XEXP (frtx, 0);
		  if (GET_CODE (frtx) == PLUS)
		    {
		      if (XEXP (frtx, 0) == frame_pointer_rtx
			  && GET_CODE (XEXP (frtx, 1)) == CONST_INT)
			offset = XEXP (frtx, 1);
		      else if (XEXP (frtx, 1) == frame_pointer_rtx
			       && GET_CODE (XEXP (frtx, 0)) == CONST_INT)
			offset = XEXP (frtx, 0);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		      /* If there is a separate arg pointer
			 and REG_PARM_STACK_SPACE is defined,
			 parms passed in regs can be copied
			 to slots reached via the arg pointer.  */
		      if (XEXP (frtx, 0) == arg_pointer_rtx
			  && GET_CODE (XEXP (frtx, 1)) == CONST_INT)
			offset = XEXP (frtx, 1);
		      else if (XEXP (frtx, 1) == arg_pointer_rtx
			       && GET_CODE (XEXP (frtx, 0)) == CONST_INT)
			offset = XEXP (frtx, 0);
#endif
		    }
		  if (offset)
		    parm_map[INTVAL (offset) / UNITS_PER_WORD] = arg_vec[i];
		  else if (TREE_TYPE (formal) != error_mark_node)
		    abort ();
		}
	      else if (GET_CODE (frtx) != REG)
		abort ();
	    }
	  /* Create an entry in REG_MAP that says what rtx is associated
	     with a pseudo register from the function being inlined.  */
	  if (GET_CODE (DECL_RTL (formal)) == REG)
	    reg_map[REGNO (DECL_RTL (formal))] = arg_vec[i];
	}
    }

  /* Make certain that we can accept struct_value_{incoming_rtx,rtx},
     and map it.  */
  if (this_struct_value_rtx == 0)
    ;
  else if (GET_CODE (struct_value_incoming_rtx) == REG)
    reg_map[REGNO (XEXP (DECL_RTL (DECL_RESULT (fndecl)), 0))]
      = this_struct_value_rtx;
  else if (GET_CODE (struct_value_incoming_rtx) == MEM
	   && XEXP (XEXP (struct_value_incoming_rtx, 0), 0) == frame_pointer_rtx
	   && GET_CODE (XEXP (XEXP (struct_value_incoming_rtx, 0), 1)) == CONST_INT)
#if 1
    reg_map[REGNO (XEXP (DECL_RTL (DECL_RESULT (fndecl)), 0))]
      = this_struct_value_rtx;
#else
    parm_map[INTVAL (XEXP (XEXP (struct_value_incoming_rtx, 0), 1)) / UNITS_PER_WORD]
      = this_struct_value_rtx;
#endif
  else
    abort ();

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* As we copy insns, record the correspondence, so that inter-insn
     references can be copied into isomorphic structure.  */

  insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero (insn_map, INSN_UID (header) * sizeof (rtx));

  /* Set up a target to translate the inline function's value-register.  */

  if (structure_value_addr != 0 || TYPE_MODE (type) == VOIDmode)
    inline_target = 0;
  else
    {
      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).  */
      enum machine_mode arriving_mode
	= TYPE_MODE (DECL_RESULT_TYPE (fndecl));

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target && GET_CODE (target) == REG
	  && GET_MODE (target) == departing_mode)
	inline_target = target;
      else
	{
	  inline_target = gen_reg_rtx (departing_mode);
	  if (target == 0)
	    target = inline_target;
	}

      /* If the function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	inline_target = gen_rtx (SUBREG, arriving_mode, inline_target, 0);
    }
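  /* Illustrative case (hypothetical modes): a function declared to
     return `short' (departing_mode HImode) whose body actually computes
     the value in SImode (arriving_mode) ends up with INLINE_TARGET of
     the form (subreg:SI (reg:HI n) 0); substituted insns then see the
     wider mode, while TARGET keeps the declared one.  */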
  /* Make space in current function's stack frame
     for the stack frame of the inline function.
     Adjust all frame-pointer references by the difference
     between the offset to this space
     and the offset to the equivalent space in the inline
     function's frame.
     This difference equals the size of preexisting locals.  */

  fp_delta = get_frame_size ();
#ifdef FRAME_GROWS_DOWNWARD
  fp_delta = - fp_delta;
#endif
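  /* Worked example (hypothetical sizes): if the current function already
     has 48 bytes of locals and the frame grows downward, fp_delta is
     -48, so an inlined local at (plus (frame_pointer) (const_int -8))
     is relocated to (plus (frame_pointer) (const_int -56)).  */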
  before_inline_fp_rtx = get_last_insn ();
  inline_fp_rtx = 0;

  /* Now allocate the space for that to point at.  */

  assign_stack_local (VOIDmode, DECL_FRAME_SIZE (fndecl));

  /* Now copy the insns one by one.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, next = 0;

    retry:
      orig_asm_operands_vector = 0;
      copy_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);

	  /* Special handling for the insn immediately after a CALL_INSN
	     that returned a value:
	     If it does copy the value, we must avoid the usual translation
	     of the return-register into INLINE_TARGET.
	     If it just USEs the value, the inline function expects it to
	     stay in the return-register and be returned,
	     so copy it into INLINE_TARGET.  */

	  if (follows_call
	      /* Allow a stack-adjust, handled normally, to come in between
		 the call and the value-copying insn.  */
	      && ! (GET_CODE (pattern) == SET
		    && SET_DEST (pattern) == stack_pointer_rtx))
	    {
	      if (GET_CODE (pattern) == SET
		  && rtx_equal_p (SET_SRC (pattern), follows_call))
		/* This insn copies the value: take special care to copy
		   that value to this insn's destination.  */
		{
		  copy = emit_insn (gen_rtx (SET, VOIDmode,
					     copy_rtx_and_substitute (SET_DEST (pattern)),
					     follows_call));
		  if (! note_integrated (copy))
		    {
		      next = 0;
		      goto retry;
		    }
		  follows_call = 0;
		  break;
		}
	      else if (GET_CODE (pattern) == USE
		       && rtx_equal_p (XEXP (pattern, 0), follows_call))
		/* This insn does nothing but says the value is expected
		   to flow through to the inline function's return-value.
		   Make that happen, then ignore this insn.  */
		{
		  copy = emit_insn (gen_rtx (SET, VOIDmode, inline_target,
					     follows_call));
		  if (! note_integrated (copy))
		    {
		      next = 0;
		      goto retry;
		    }
		  follows_call = 0;
		  break;
		}
	      /* If it does neither, this value must be ignored.  */
	      follows_call = 0;
	    }

	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG)
	    {
	      /* The (USE (REG n)) at return from the function should
		 be ignored since we are changing (REG n) into
		 inline_target.  */
	      if (! ignore && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
		break;
	      /* Don't emit a (USE (REG n)) of something which
		 is now constant.  */
	      if (REGNO (XEXP (pattern, 0)) >= FIRST_PSEUDO_REGISTER
		  && (const_age == (unsigned) -1
		      || const_age_map[REGNO (XEXP (pattern, 0))] >= const_age))
		break;
	    }

	  /* Ignore setting a function value that we don't want to use.  */
	  if (inline_target == 0
	      && GET_CODE (pattern) == SET
	      && GET_CODE (SET_DEST (pattern)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
	    break;

	  /* Try to do some quick constant folding here.
	     This will save execution time of the compiler,
	     as well as time and space of the program if done here.  */
	  if (GET_CODE (pattern) == SET
	      && SET_DEST (pattern) == cc0_rtx)
	    next = try_fold_cc0 (insn);

	  if (next != 0)
	    {
	      used_const_equiv = 0;
	      insn = next;
	    }
	  else
	    {
	      rtx note = find_reg_note (insn, REG_EQUIV, 0);
	      copy = emit_insn (copy_rtx_and_substitute (pattern));
	      if (! note_integrated (copy))
		{
		  next = 0;
		  goto retry;
		}

	      /* If we are copying an insn that loads a constant,
		 record the constantness.  */
	      if (note)
		REG_NOTES (copy)
		  = gen_rtx (EXPR_LIST, REG_EQUIV, XEXP (note, 0),
			     REG_NOTES (copy));

	      if (GET_CODE (pattern) == SET)
		{
		  rtx dest = SET_DEST (pattern);
		  if (GET_CODE (dest) == REG)
		    {
		      int regno = REGNO (dest);

		      if (regno >= FIRST_PSEUDO_REGISTER
			  && CONSTANT_P (SET_SRC (pattern))
			  && (const_equiv_map[regno] == 0
			      /* Following clause is a hack to make
				 case work where GNU C++ reassigns
				 a variable to make cse work right.  */
			      || ! rtx_equal_p (const_equiv_map[regno],
						SET_SRC (pattern))))
			{
			  const_equiv_map[regno] = SET_SRC (pattern);
			  const_age_map[regno] = const_age;
			}
		    }
		  else
		    {
		      while (GET_CODE (dest) == SUBREG
			     || GET_CODE (dest) == STRICT_LOW_PART
			     || GET_CODE (dest) == SIGN_EXTRACT
			     || GET_CODE (dest) == ZERO_EXTRACT)
			dest = SUBREG_REG (dest);

		      /* Forget everything we thought we knew.  */
		      if (GET_CODE (dest) == REG
			  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
			const_equiv_map[REGNO (dest)] = 0;
		    }
		}
	    }
	  break;

	case JUMP_INSN:
	  follows_call = 0;
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      emit_jump (local_return_label);
	      break;
	    }
	  copy = emit_jump_insn (copy_rtx_and_substitute (PATTERN (insn)));
	  if (! note_integrated (copy))
	    {
	      next = 0;
	      goto retry;
	    }
	  break;

	case CALL_INSN:
	  copy = emit_call_insn (copy_rtx_and_substitute (PATTERN (insn)));
	  if (! note_integrated (copy))
	    {
	      next = 0;
	      goto retry;
	    }
	  /* Special handling needed for the following INSN depending on
	     whether it copies the value from the fcn return reg.  */
	  if (GET_CODE (PATTERN (insn)) == SET)
	    follows_call = SET_DEST (PATTERN (insn));
	  else if (GET_CODE (PATTERN (insn)) == PARALLEL
		   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	    follows_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	  break;

	case CODE_LABEL:
	  const_age += 2;
	  copy = emit_label (label_map[CODE_LABEL_NUMBER (insn)]);
	  follows_call = 0;
	  if (const_age & 1)
	    {
	      int i;

	      const_age += 1;
	      for (i = max_regno; i >= 0; i--)
		if (const_age_map[i] == (unsigned) -1)
		  const_age_map[i] = 0;
	    }
	  break;

	case BARRIER:
	  const_age += 2;
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && (flag_inline_debug || NOTE_LINE_NUMBER (insn) < 0))
	    copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	  break;
	}
      if (! (used_const_equiv == 0))
	abort ();
      insn_map[INSN_UID (insn)] = copy;
    }

  if (local_return_label)
    emit_label (local_return_label);

  /* Don't try substituting constants when making up a DECL's rtl.
     That would probably only confuse the debugger, but I don't
     know for sure.  */
  try_use_const = 0;
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  */
  copy_decl_tree (DECL_INITIAL (fndecl), 0);
  /* For safety.  */
  if (try_use_const)
    used_const_equiv = 0;

  /* End the scope containing the copied formal parameter variables.  */

  expand_end_bindings (getdecls (), 1, 1);
  poplevel (1, 1, 0);
  poplevel (0, 0, 0);
  emit_line_note (input_filename, lineno);

  reg_map = NULL;
  label_map = NULL;

  const_equiv_map = old_const_equiv_map;
  const_age_map = old_const_age_map;
  const_age = old_const_age;

  if (ignore || TYPE_MODE (type) == VOIDmode)
    return 0;

  if (structure_value_addr)
    {
      if (target)
	return target;
      return gen_rtx (MEM, TYPE_MODE (type),
		      memory_address (BLKmode, structure_value_addr));
    }
  else if (target && target != inline_target
	   && (GET_CODE (inline_target) != SUBREG
	       || SUBREG_REG (inline_target) != target))
    {
      /* Copy result back to TARGET if TARGET is not INLINE_TARGET.
	 In general, these should always wind up being the same mode,
	 after SUBREGs, if any, are stripped.  */
      convert_move (target, inline_target, 0);
    }

  return target;
}
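
/* Usage sketch (illustrative): a caller such as the function-call
   expander is expected to treat the three possible return values
   distinctly, e.g.

     temp = expand_inline_function (fndecl, actparms, target,
				    ignore, type, structure_value_addr);
     if (temp == (rtx) -1)
       ... fall back to emitting a real call ...
     else
       ... temp (possibly 0) is the value of the call ...

   The identifiers here are illustrative; only the (rtx) -1 and 0
   conventions come from the comment at the head of this function.  */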
/* Given a chain of PARM_DECLs, ARGS, and a vector of RTL homes VEC,
   copy each decl into a VAR_DECL, push all of those decls
   and give each one the corresponding home.  */

static void
copy_parm_decls (args, vec)
     tree args;
     rtx *vec;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      TREE_INLINE (decl) = 1;
      pushdecl (decl);
      DECL_RTL (decl) = vec[i];
    }
}
/* Given a LET_STMT node, push decls and levels
   so as to construct in the current function a tree of contexts
   isomorphic to the one that is given.  */

static void
copy_decl_tree (let, level)
     tree let;
     int level;
{
  tree t, node;

  pushlevel (0);

  for (t = STMT_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
      DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
      DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
      if (DECL_RTL (t) != 0)
	{
	  if (GET_CODE (DECL_RTL (t)) == MEM
	      && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (t), 0)))
	    /* copy_rtx_and_substitute would call memory_address
	       which would copy the address into a register.
	       Then debugging-output wouldn't know how to handle it.  */
	    DECL_RTL (d) = DECL_RTL (t);
	  else
	    DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t));
	}
      TREE_EXTERNAL (d) = TREE_EXTERNAL (t);
      TREE_STATIC (d) = TREE_STATIC (t);
      TREE_PUBLIC (d) = TREE_PUBLIC (t);
      TREE_LITERAL (d) = TREE_LITERAL (t);
      TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
      TREE_READONLY (d) = TREE_READONLY (t);
      TREE_VOLATILE (d) = TREE_VOLATILE (t);
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;
      /* Prevent warning for shadowing with these.  */
      TREE_INLINE (d) = 1;
      pushdecl (d);
    }

  for (t = STMT_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    copy_decl_tree (t, level + 1);

  node = poplevel (level > 0, 0, 0);
  if (node)
    TREE_USED (node) = TREE_USED (let);
}
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

static rtx
copy_rtx_and_substitute (orig)
     register rtx orig;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If a frame-pointer register shows up, then we
	 must `fix' the reference.  If the stack pointer
	 register shows up, it must be part of stack-adjustments
	 (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (reg_map[regno] != 0)
	    return reg_map[regno];
	  if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.
		 If it does, it may not be the same mode as `inline_target'
		 because SUBREG is not required for hard regs.
		 If not, adjust mode of inline_target to fit the context.  */
	      if (inline_target == 0)
		{
		  /* If there isn't an inline target, so be it.
		     Just fake up a reg--it won't get used
		     for anything important anyway.  */
		  inline_target = gen_reg_rtx (mode);
		  return inline_target;
		}
	      if (mode == GET_MODE (inline_target))
		return inline_target;
	      return gen_rtx (SUBREG, mode, inline_target, 0);
	    }
	  if (regno == FRAME_POINTER_REGNUM)
	    return plus_constant (orig, fp_delta);
	  return orig;
	}
      if (try_use_const
	  && const_equiv_map[regno] != 0
	  && const_age_map[regno] == (unsigned) -2)
	{
	  used_const_equiv = 1;
	  return const_equiv_map[regno];
	}
      if (reg_map[regno] == NULL)
	reg_map[regno] = gen_reg_rtx (mode);
      return reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig));
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
	return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
			SUBREG_WORD (orig) + SUBREG_WORD (copy));
      /* Don't build a SUBREG of a CONST_INT.  */
      if (GET_CODE (copy) == CONST_INT)
	return copy;
      return gen_rtx (SUBREG, GET_MODE (orig), copy,
		      SUBREG_WORD (orig));
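
      /* Illustrative case (hypothetical registers): if the inner reg of
	 (subreg:HI (reg:SI 60) 0) maps to (subreg:SI (reg:DI 70) 1),
	 nesting is avoided by summing the SUBREG_WORD values, yielding
	 the single rtx (subreg:HI (reg:DI 70) 1).  */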
    case CODE_LABEL:
      return label_map[CODE_LABEL_NUMBER (orig)];

    case LABEL_REF:
      copy = rtx_alloc (LABEL_REF);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
      return orig;

    case CONST:
      /* Make a new constant pool entry for a constant
	 that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
	return force_const_mem (GET_MODE (orig), XEXP (orig, 0));
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  XSTR (copy, 0) = XSTR (orig, 0);
	  XSTR (copy, 1) = XSTR (orig, 1);
	  XINT (copy, 2) = XINT (orig, 2);
	  XVEC (copy, 3) = copy_asm_operands_vector;
	  XVEC (copy, 4) = copy_asm_constraints_vector;
	  XSTR (copy, 5) = XSTR (orig, 5);
	  XINT (copy, 6) = XINT (orig, 6);
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return gen_rtx (CALL, GET_MODE (orig),
			gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
				 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0))),
			copy_rtx_and_substitute (XEXP (orig, 1)));
      break;

    case PLUS:
      /* Note: treat the PLUS case as though it might be needed
	 to be part of an address.  If it turns out that the machine's
	 PLUS insns can handle something more exciting than a ``load
	 effective address'', the optimizer will discover this fact.  */
      /* Take care of the easy case quickly.  */
      if (XEXP (orig, 0) == frame_pointer_rtx
	  || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      && XEXP (orig, 0) == arg_pointer_rtx))
	{
	  rtx reg = XEXP (orig, 0), copy = XEXP (orig, 1);

	  if (GET_CODE (copy) == CONST_INT)
	    {
	      int c = INTVAL (copy);

	      if (reg == arg_pointer_rtx && c >= first_parm_offset)
		{
		  copy = access_parm_map (c, VOIDmode);
		  if (GET_CODE (copy) != MEM)
		    /* Should not happen, because a parm we need to address
		       should not be living in a register.
		       (expand_inline_function copied it to a stack slot.)  */
		    abort ();
		  return XEXP (copy, 0);
		}
	      return gen_rtx (PLUS, mode,
			      frame_pointer_rtx,
			      gen_rtx (CONST_INT, SImode,
				       c + fp_delta));
	    }
	  copy = copy_rtx_and_substitute (copy);
	  temp = force_reg (mode, gen_rtx (PLUS, mode, frame_pointer_rtx, copy));
	  return plus_constant (temp, fp_delta);
	}
1879 else if (reg_mentioned_p (frame_pointer_rtx, orig)
1880 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1881 && reg_mentioned_p (arg_pointer_rtx, orig)))
1883 if (GET_CODE (XEXP (orig, 1)) == CONST_INT)
1885 copy = copy_rtx_and_substitute (XEXP (orig, 0));
1886 temp = plus_constant (copy, INTVAL (XEXP (orig, 1)));
1888 else
1890 temp = gen_rtx (PLUS, GET_MODE (orig),
1891 copy_rtx_and_substitute (XEXP (orig, 0)),
1892 copy_rtx_and_substitute (XEXP (orig, 1)));
1894 if (memory_address_p (mode, orig))
1895 temp = memory_address (mode, temp);
1897 else
1899 int old_used_const_equiv = used_const_equiv;
1901 used_const_equiv = 0;
1902 temp = gen_rtx (PLUS, GET_MODE (orig),
1903 copy_rtx_and_substitute (XEXP (orig, 0)),
1904 copy_rtx_and_substitute (XEXP (orig, 1)));
1905 if (used_const_equiv)
1907 if (GET_CODE (XEXP (temp, 0)) == CONST_INT)
1908 temp = plus_constant (XEXP (temp, 1), INTVAL (XEXP (temp, 0)));
1909 else if (GET_CODE (XEXP (temp, 1)) == CONST_INT)
1910 temp = plus_constant (XEXP (temp, 0), INTVAL (XEXP (temp, 1)));
1911 else if (memory_address_p (mode, orig))
1913 try_use_const = 0;
1914 used_const_equiv = 0;
1915 temp = gen_rtx (PLUS, GET_MODE (orig),
1916 copy_rtx_and_substitute (XEXP (orig, 0)),
1917 copy_rtx_and_substitute (XEXP (orig, 1)));
1920 else if (memory_address_p (mode, orig))
1921 temp = memory_address (mode, temp);
1923 used_const_equiv |= old_used_const_equiv;
1925 return temp;
1927 case MULT:
1929 int old_used_const_equiv = used_const_equiv;
1931 used_const_equiv = 0;
1933 temp = gen_rtx (MULT, GET_MODE (orig),
1934 copy_rtx_and_substitute (XEXP (orig, 0)),
1935 copy_rtx_and_substitute (XEXP (orig, 1)));
1937 if (used_const_equiv)
1939 if (GET_CODE (XEXP (temp, 0)) == CONST_INT
1940 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1941 temp = gen_rtx (CONST_INT, VOIDmode,
1942 INTVAL (XEXP (temp, 0)) * INTVAL (XEXP (temp, 1)));
1943 else
1945 try_use_const = 0;
1946 used_const_equiv = 0;
1947 temp = gen_rtx (MULT, GET_MODE (orig),
1948 copy_rtx_and_substitute (XEXP (orig, 0)),
1949 copy_rtx_and_substitute (XEXP (orig, 1)));
1952 used_const_equiv |= old_used_const_equiv;
1954 return temp;
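      /* The retry with try_use_const cleared seemingly undoes a half-folded
	 MULT: if a register was replaced by its constant equivalent but the
	 product still could not be folded to a single CONST_INT, the
	 original registers are better for address recognition, so both
	 operands are re-copied without constant substitution.  */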
1956 case MEM:
1957 /* Take care of easiest case here. */
1958 copy = XEXP (orig, 0);
1959 if (copy == frame_pointer_rtx || copy == arg_pointer_rtx)
1960 return gen_rtx (MEM, mode,
1961 plus_constant (frame_pointer_rtx, fp_delta));
1963 /* Allow a pushing-address even if that is not valid as an
1964 ordinary memory address. It indicates we are inlining a special
1965 push-insn. These must be copied; otherwise unshare_all_rtl
1966 might clobber them to point at temporary rtl of this function. */
1967 #ifdef STACK_GROWS_DOWNWARD
1968 if (GET_CODE (copy) == PRE_DEC && XEXP (copy, 0) == stack_pointer_rtx)
1969 return gen_rtx (MEM, mode, copy_rtx_and_substitute (copy));
1970 #else
1971 if (GET_CODE (copy) == PRE_INC && XEXP (copy, 0) == stack_pointer_rtx)
1972 return gen_rtx (MEM, mode, copy_rtx_and_substitute (copy));
1973 #endif
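      /* For example, on a machine where the stack grows downward a push
	 looks like (mem (pre_dec (reg sp))); the PRE_DEC is copied here
	 even though it is not a valid ordinary memory address.  */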
1975 /* If this is some other sort of address that isn't generally valid,
1976 break out all the registers referred to. */
1977 if (! memory_address_p (mode, copy))
1978 return gen_rtx (MEM, mode, copy_address (copy));
1980 /* There is no easy way to get our mode to `access_parm_map', which
1981 may need to know it, so most of the PLUS code is duplicated here. */
1982 if (GET_CODE (copy) == PLUS)
1984 if (XEXP (copy, 0) == frame_pointer_rtx
1985 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1986 && XEXP (copy, 0) == arg_pointer_rtx))
1988 rtx reg;
1989 reg = XEXP (copy, 0), copy = XEXP (copy, 1);
1991 if (GET_CODE (copy) == CONST_INT)
1993 int c = INTVAL (copy);
1995 if (reg == arg_pointer_rtx && c >= first_parm_offset)
1996 return access_parm_map (c, mode);
1998 temp = gen_rtx (PLUS, Pmode,
1999 frame_pointer_rtx,
2000 gen_rtx (CONST_INT, SImode,
2001 c + fp_delta));
2002 if (! memory_address_p (Pmode, temp))
2004 if (inline_fp_rtx == 0)
2006 rtx last = get_last_insn ();
2007 inline_fp_rtx
2008 = copy_to_mode_reg (Pmode,
2009 plus_constant (frame_pointer_rtx, fp_delta));
2010 reorder_insns (NEXT_INSN (last), get_last_insn (), before_inline_fp_rtx);
2012 return gen_rtx (MEM, mode, plus_constant (inline_fp_rtx, c));
2015 copy = copy_rtx_and_substitute (copy);
2016 temp = gen_rtx (PLUS, Pmode, frame_pointer_rtx, copy);
2017 temp = plus_constant (temp, fp_delta);
2018 temp = memory_address (Pmode, temp);
2020 else if (reg_mentioned_p (frame_pointer_rtx, copy)
2021 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2022 && reg_mentioned_p (arg_pointer_rtx, copy)))
2024 if (GET_CODE (XEXP (copy, 1)) == CONST_INT)
2026 temp = copy_rtx_and_substitute (XEXP (copy, 0));
2027 temp = plus_constant (temp, INTVAL (XEXP (copy, 1)));
2029 else
2031 temp = gen_rtx (PLUS, GET_MODE (copy),
2032 copy_rtx_and_substitute (XEXP (copy, 0)),
2033 copy_rtx_and_substitute (XEXP (copy, 1)));
2036 else
2038 if (GET_CODE (XEXP (copy, 1)) == CONST_INT)
2039 temp = plus_constant (copy_rtx_and_substitute (XEXP (copy, 0)),
2040 INTVAL (XEXP (copy, 1)));
2041 else
2043 rtx left = copy_rtx_and_substitute (XEXP (copy, 0));
2044 rtx right = copy_rtx_and_substitute (XEXP (copy, 1));
2046 temp = gen_rtx (PLUS, GET_MODE (copy), left, right);
2050 else
2051 temp = copy_rtx_and_substitute (copy);
2053 temp = change_address (orig, mode, temp);
2054 /* Works around a GCC bug for now. */
2055 RTX_UNCHANGING_P (temp) = 0;
2056 return temp;
2058 case RETURN:
2059 abort ();
2062 copy = rtx_alloc (code);
2063 PUT_MODE (copy, mode);
2064 copy->in_struct = orig->in_struct;
2065 copy->volatil = orig->volatil;
2066 copy->unchanging = orig->unchanging;
2068 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2070 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2072 switch (*format_ptr++)
2074 case '0':
2075 break;
2077 case 'e':
2078 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i));
2079 break;
2081 case 'u':
2082 /* Change any references to old-insns to point to the
2083 corresponding copied insns. */
2084 XEXP (copy, i) = insn_map[INSN_UID (XEXP (orig, i))];
2085 break;
2087 case 'E':
2088 XVEC (copy, i) = XVEC (orig, i);
2089 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2091 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2092 for (j = 0; j < XVECLEN (copy, i); j++)
2093 XVECEXP (copy, i, j) = copy_rtx_and_substitute (XVECEXP (orig, i, j));
2095 break;
2097 case 'i':
2098 XINT (copy, i) = XINT (orig, i);
2099 break;
2101 case 's':
2102 XSTR (copy, i) = XSTR (orig, i);
2103 break;
2105 default:
2106 abort ();
2110 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
2112 orig_asm_operands_vector = XVEC (orig, 3);
2113 copy_asm_operands_vector = XVEC (copy, 3);
2114 copy_asm_constraints_vector = XVEC (copy, 4);
2117 return copy;
2120 /* Get the value corresponding to an address relative to the arg pointer
2121 at offset RELADDRESS. MODE is the machine mode of the reference.
2122 MODE is used only when the value is a REG.
2123 Pass VOIDmode for MODE when the mode is not known;
2124 in such cases, you should make sure the value is a MEM. */
2126 static rtx
2127 access_parm_map (reladdress, mode)
2128 int reladdress;
2129 enum machine_mode mode;
2131 /* Index in parm_map. */
2132 int index = reladdress / UNITS_PER_WORD;
2133 /* Offset of the data being referenced
2134 from the beginning of the value for that parm. */
2135 int offset = reladdress % UNITS_PER_WORD;
2136 rtx copy;
2138 /* If we are referring to the middle of a multiword parm,
2139 find the beginning of that parm.
2140 OFFSET gets the offset of the reference from
2141 the beginning of the parm. */
2143 while (parm_map[index] == 0)
2145 index--;
2146 if (index < first_parm_offset / UNITS_PER_WORD)
2147 /* If this abort happens, it means we need
2148 to handle "decrementing" INDEX back far
2149 enough to start looking among the reg parms
2150 instead of the stack parms. What a mess! */
2151 abort ();
2152 offset += UNITS_PER_WORD;
2155 copy = parm_map[index];
2157 #ifdef BYTES_BIG_ENDIAN
2158 /* Subtract from OFFSET the offset of where
2159 the actual (non-BLKmode) parm value would start. */
2160 if (GET_MODE (copy) != BLKmode
2161 && GET_MODE_SIZE (GET_MODE (copy)) < UNITS_PER_WORD)
2162 offset
2163 -= (UNITS_PER_WORD
2164 - GET_MODE_SIZE (GET_MODE (copy)));
2165 #endif
2167 /* For memory ref, adjust it by the desired offset. */
2168 if (GET_CODE (copy) == MEM)
2170 if (offset != 0 || GET_MODE (copy) != mode)
2171 return change_address (copy, mode,
2172 plus_constant (XEXP (copy, 0),
2173 offset));
2174 return copy;
2177 if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG
2178 && ! CONSTANT_P (copy))
2179 abort ();
2180 if (mode == VOIDmode)
2181 abort ();
2183 /* A REG cannot be offset by bytes, so use a subreg
2184 (which is possible only in certain cases). */
2185 if (GET_MODE (copy) != mode
2186 && GET_MODE (copy) != VOIDmode)
2188 int word;
2189 /* Crash if the portion of the arg wanted
2190 is not the least significant.
2191 Functions with refs to other parts of a
2192 parameter should not be inline--
2193 see function_cannot_inline_p. */
2194 #ifdef BYTES_BIG_ENDIAN
2195 if (offset + GET_MODE_SIZE (mode)
2196 != GET_MODE_SIZE (GET_MODE (copy)))
2197 abort ();
2198 #else
2199 if (offset != 0)
2200 abort ();
2201 #endif
2202 word = 0;
2203 if (GET_CODE (copy) == SUBREG)
2204 word = SUBREG_WORD (copy), copy = SUBREG_REG (copy);
2205 if (CONSTANT_P (copy))
2206 copy = force_reg (GET_MODE (copy), copy);
2207 return gen_rtx (SUBREG, mode, copy, word);
2210 return copy;
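#if 0
/* Standalone sketch of access_parm_map's index/offset arithmetic,
   assuming UNITS_PER_WORD is 4 (hypothetical values; not part of the
   build, in the spirit of the disabled section at the end of this file). */
static void
access_parm_map_example ()
{
  int reladdress = 10;
  int index = reladdress / 4;	/* word 2 of the parm area */
  int offset = reladdress % 4;	/* byte 2 within that word */
  /* If parm_map[2] were 0 because the parm is a multiword value starting
     in word 1, the loop in access_parm_map would step INDEX back to 1
     and raise OFFSET to 6.  */
}
#endif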
2213 /* Like copy_rtx_and_substitute but produces different output, suitable
2214 for an idiosyncratic address that isn't memory_address_p.
2215 The output resembles the input except that REGs and MEMs are replaced
2216 with new pseudo registers. All the "real work" is done in separate
2217 insns which set up the values of these new registers. */
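/* For instance, an address that is itself a memory indirection, e.g.
   (mem:SI (reg 65)) (hypothetical rtx), is loaded into a fresh pseudo via
   copy_to_reg below; the load is emitted as a separate insn and only the
   new register appears in the returned address.  */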
2219 static rtx
2220 copy_address (orig)
2221 register rtx orig;
2223 register rtx copy;
2224 register int i, j;
2225 register RTX_CODE code;
2226 register enum machine_mode mode;
2227 register char *format_ptr;
2229 if (orig == 0)
2230 return 0;
2232 code = GET_CODE (orig);
2233 mode = GET_MODE (orig);
2235 switch (code)
2237 case REG:
2238 if (REGNO (orig) != FRAME_POINTER_REGNUM)
2239 return copy_rtx_and_substitute (orig);
2240 return plus_constant (frame_pointer_rtx, fp_delta);
2242 case PLUS:
2243 if (XEXP (orig, 0) == frame_pointer_rtx)
2244 return plus_constant (orig, fp_delta);
2245 break;
2247 case MEM:
2248 return copy_to_reg (copy_rtx_and_substitute (orig));
2250 case CODE_LABEL:
2251 case LABEL_REF:
2252 return copy_rtx_and_substitute (orig);
2254 case PC:
2255 case CC0:
2256 case CONST_INT:
2257 case CONST_DOUBLE:
2258 case SYMBOL_REF:
2259 return orig;
2262 copy = rtx_alloc (code);
2263 PUT_MODE (copy, mode);
2264 copy->in_struct = orig->in_struct;
2265 copy->volatil = orig->volatil;
2266 copy->unchanging = orig->unchanging;
2268 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2270 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2272 switch (*format_ptr++)
2274 case '0':
2275 break;
2277 case 'e':
2278 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i));
2279 break;
2281 case 'u':
2282 /* Change any references to old-insns to point to the
2283 corresponding copied insns. */
2284 XEXP (copy, i) = insn_map[INSN_UID (XEXP (orig, i))];
2285 break;
2287 case 'E':
2288 XVEC (copy, i) = XVEC (orig, i);
2289 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2291 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2292 for (j = 0; j < XVECLEN (copy, i); j++)
2293 XVECEXP (copy, i, j) = copy_rtx_and_substitute (XVECEXP (orig, i, j));
2295 break;
2297 case 'i':
2298 XINT (copy, i) = XINT (orig, i);
2299 break;
2301 case 's':
2302 XSTR (copy, i) = XSTR (orig, i);
2303 break;
2305 default:
2306 abort ();
2309 return copy;
2312 /* Return the constant equivalent of X. If X is a manifest
2313 constant, it is returned. If X is a register, we check
2314 to see if we happen to know its value as a constant. */
2315 static rtx
2316 const_equiv (x)
2317 rtx x;
2319 if (GET_CODE (x) == REG)
2321 int regno = REGNO (x);
2322 if (const_equiv_map[regno]
2323 && const_age_map[regno] >= const_age)
2324 return const_equiv_map[regno];
2325 return 0;
2327 if (CONSTANT_P (x))
2328 return x;
2329 return 0;
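/* (The age test above seemingly guards against stale equivalences: an
   entry is trusted only while const_age_map[regno] is at least the
   current const_age, which the caller is expected to advance when
   control flow makes old equivalences unsafe.)  */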
2332 /* Attempt to simplify INSN while copying it from an inline fn,
2333 assuming it is a SET that sets CC0.
2335 If we simplify it, we emit the appropriate insns and return
2336 the last insn that we have handled (since we may handle the insn
2337 that follows INSN as well as INSN itself).
2339 Otherwise we do nothing and return zero. */
2341 static rtx
2342 try_fold_cc0 (insn)
2343 rtx insn;
2345 rtx cnst = copy_rtx_and_substitute (SET_SRC (PATTERN (insn)));
2346 rtx pat, copy;
2348 if (! CONSTANT_P (cnst))
2349 /* Constant equivalences are with the old, not the new, rtl. */
2350 cnst = const_equiv (SET_SRC (PATTERN (insn)));
2351 if (cnst
2352 /* @@ Cautious: Don't know how many of these tests we need. */
2353 && NEXT_INSN (insn)
2354 && GET_CODE (pat = PATTERN (NEXT_INSN (insn))) == SET
2355 && SET_DEST (pat) == pc_rtx
2356 && GET_CODE (pat = SET_SRC (pat)) == IF_THEN_ELSE
2357 && GET_RTX_LENGTH (GET_CODE (XEXP (pat, 0))) == 2)
2359 rtx cnst2;
2360 rtx cond = XEXP (pat, 0);
2362 if ((XEXP (cond, 0) == cc0_rtx
2363 && (cnst2 = const_equiv (XEXP (cond, 1))))
2364 || (XEXP (cond, 1) == cc0_rtx
2365 && (cnst2 = const_equiv (XEXP (cond, 0)))))
2367 copy = fold_out_const_cc0 (cond, XEXP (pat, 1), XEXP (pat, 2),
2368 cnst, cnst2);
2369 if (copy)
2371 if (GET_CODE (copy) == LABEL_REF)
2373 /* We will branch unconditionally to
2374 the label specified by COPY.
2375 Eliminate dead code by running down the
2376 list of insns until we see a CODE_LABEL.
2377 If the CODE_LABEL is the one specified
2378 by COPY, we win, and can delete all code
2379 up to (but not necessarily including)
2380 that label. Otherwise only win a little:
2381 emit the branch insn, and continue expanding. */
2382 rtx tmp = NEXT_INSN (insn);
2383 while (tmp && GET_CODE (tmp) != CODE_LABEL)
2384 tmp = NEXT_INSN (tmp);
2385 if (! tmp)
2386 abort ();
2387 if (label_map[CODE_LABEL_NUMBER (tmp)] == XEXP (copy, 0))
2389 /* Big win. */
2390 return PREV_INSN (tmp);
2392 else
2394 /* Small win. Emit the unconditional branch,
2395 followed by a BARRIER, so that jump optimization
2396 will know what to do. */
2397 emit_jump (XEXP (copy, 0));
2398 return NEXT_INSN (insn);
2401 else if (copy == pc_rtx)
2403 /* Do not take the branch, just fall through.
2404 Jump optimize should handle the elimination of
2405 dead code if appropriate. */
2406 return NEXT_INSN (insn);
2408 else
2409 abort ();
2413 return 0;
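/* Example of the transformation above (hypothetical rtl): inlining a call
   whose argument is 5 can produce
	(set (cc0) (const_int 5))
	(set (pc) (if_then_else (eq (cc0) (const_int 0))
				(label_ref L1) (pc)))
   Here CNST is (const_int 5) and CNST2 is (const_int 0); fold_out_const_cc0
   decides the EQ at inline time and returns the (pc) arm, so the jump is
   never emitted in the copy and control simply falls through.  */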
2416 /* If (COND_RTX CNST1 CNST2) yields a result we can treat
2417 as being constant, return THEN_RTX if the result is always
2418 non-zero, and return ELSE_RTX otherwise. */
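/* When CNST1 or CNST2 is a constant but not a CONST_INT (say, a
   SYMBOL_REF), its value is taken below as 1, i.e. "known non-zero of
   unknown magnitude"; that is why a literal zero on the other side can
   still decide NE, EQ and the ordering tests.  */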
2419 static rtx
2420 fold_out_const_cc0 (cond_rtx, then_rtx, else_rtx, cnst1, cnst2)
2421 rtx cond_rtx, then_rtx, else_rtx;
2422 rtx cnst1, cnst2;
2424 int value1, value2;
2425 int int1 = GET_CODE (cnst1) == CONST_INT;
2426 int int2 = GET_CODE (cnst2) == CONST_INT;
2427 if (int1)
2428 value1 = INTVAL (cnst1);
2429 else
2430 value1 = 1;
2431 if (int2)
2432 value2 = INTVAL (cnst2);
2433 else
2434 value2 = 1;
2436 switch (GET_CODE (cond_rtx))
2438 case NE:
2439 if (int1 && int2)
2440 if (value1 != value2)
2441 return copy_rtx_and_substitute (then_rtx);
2442 else
2443 return copy_rtx_and_substitute (else_rtx);
2444 if (value1 == 0 || value2 == 0)
2445 return copy_rtx_and_substitute (then_rtx);
2446 if (int1 == 0 && int2 == 0)
2447 if (rtx_equal_p (cnst1, cnst2))
2448 return copy_rtx_and_substitute (else_rtx);
2449 break;
2450 case EQ:
2451 if (int1 && int2)
2452 if (value1 == value2)
2453 return copy_rtx_and_substitute (then_rtx);
2454 else
2455 return copy_rtx_and_substitute (else_rtx);
2456 if (value1 == 0 || value2 == 0)
2457 return copy_rtx_and_substitute (else_rtx);
2458 if (int1 == 0 && int2 == 0)
2459 if (rtx_equal_p (cnst1, cnst2))
2460 return copy_rtx_and_substitute (then_rtx);
2461 break;
2462 case GE:
2463 if (int1 && int2)
2464 if (value1 >= value2)
2465 return copy_rtx_and_substitute (then_rtx);
2466 else
2467 return copy_rtx_and_substitute (else_rtx);
2468 if (value1 == 0)
2469 return copy_rtx_and_substitute (else_rtx);
2470 if (value2 == 0)
2471 return copy_rtx_and_substitute (then_rtx);
2472 break;
2473 case GT:
2474 if (int1 && int2)
2475 if (value1 > value2)
2476 return copy_rtx_and_substitute (then_rtx);
2477 else
2478 return copy_rtx_and_substitute (else_rtx);
2479 if (value1 == 0)
2480 return copy_rtx_and_substitute (else_rtx);
2481 if (value2 == 0)
2482 return copy_rtx_and_substitute (then_rtx);
2483 break;
2484 case LE:
2485 if (int1 && int2)
2486 if (value1 <= value2)
2487 return copy_rtx_and_substitute (then_rtx);
2488 else
2489 return copy_rtx_and_substitute (else_rtx);
2490 if (value1 == 0)
2491 return copy_rtx_and_substitute (then_rtx);
2492 if (value2 == 0)
2493 return copy_rtx_and_substitute (else_rtx);
2494 break;
2495 case LT:
2496 if (int1 && int2)
2497 if (value1 < value2)
2498 return copy_rtx_and_substitute (then_rtx);
2499 else
2500 return copy_rtx_and_substitute (else_rtx);
2501 if (value1 == 0)
2502 return copy_rtx_and_substitute (then_rtx);
2503 if (value2 == 0)
2504 return copy_rtx_and_substitute (else_rtx);
2505 break;
2506 case GEU:
2507 if (int1 && int2)
2508 if ((unsigned)value1 >= (unsigned)value2)
2509 return copy_rtx_and_substitute (then_rtx);
2510 else
2511 return copy_rtx_and_substitute (else_rtx);
2512 if (value1 == 0)
2513 return copy_rtx_and_substitute (else_rtx);
2514 if (value2 == 0)
2515 return copy_rtx_and_substitute (then_rtx);
2516 break;
2517 case GTU:
2518 if (int1 && int2)
2519 if ((unsigned)value1 > (unsigned)value2)
2520 return copy_rtx_and_substitute (then_rtx);
2521 else
2522 return copy_rtx_and_substitute (else_rtx);
2523 if (value1 == 0)
2524 return copy_rtx_and_substitute (else_rtx);
2525 if (value2 == 0)
2526 return copy_rtx_and_substitute (then_rtx);
2527 break;
2528 case LEU:
2529 if (int1 && int2)
2530 if ((unsigned)value1 <= (unsigned)value2)
2531 return copy_rtx_and_substitute (then_rtx);
2532 else
2533 return copy_rtx_and_substitute (else_rtx);
2534 if (value1 == 0)
2535 return copy_rtx_and_substitute (then_rtx);
2536 if (value2 == 0)
2537 return copy_rtx_and_substitute (else_rtx);
2538 break;
2539 case LTU:
2540 if (int1 && int2)
2541 if ((unsigned)value1 < (unsigned)value2)
2542 return copy_rtx_and_substitute (then_rtx);
2543 else
2544 return copy_rtx_and_substitute (else_rtx);
2545 if (value1 == 0)
2546 return copy_rtx_and_substitute (then_rtx);
2547 if (value2 == 0)
2548 return copy_rtx_and_substitute (else_rtx);
2549 break;
2551 /* Could not hack it. */
2552 return 0;
2555 /* If any CONST expressions with RTX_INTEGRATED_P are present in X,
2556 they should be in the constant pool.
2557 Run force_const_mem to put them there. */
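/* E.g. a (const ...) marked RTX_INTEGRATED_P came from the constant pool
   of the inline function; force_const_mem re-enters its value in this
   function's pool, and the expression is replaced by the MEM that
   force_const_mem returns.  */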
2559 static void
2560 restore_constants (x)
2561 rtx x;
2563 int i, j;
2564 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
2566 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2568 switch (*fmt++)
2570 case 'E':
2571 for (j = 0; j < XVECLEN (x, i); j++)
2572 if (RTX_INTEGRATED_P (XVECEXP (x, i, j))
2573 && GET_CODE (XVECEXP (x, i, j)) == CONST)
2574 XVECEXP (x, i, j) = force_const_mem (GET_MODE (XVECEXP (x, i, j)),
2575 XEXP (XVECEXP (x, i, j), 0));
2576 else
2577 restore_constants (XVECEXP (x, i, j));
2578 break;
2580 case 'e':
2581 if (RTX_INTEGRATED_P (XEXP (x, i))
2582 && GET_CODE (XEXP (x, i)) == CONST)
2583 XEXP (x, i) = force_const_mem (GET_MODE (XEXP (x, i)),
2584 XEXP (XEXP (x, i), 0));
2585 else
2586 restore_constants (XEXP (x, i));
2587 break;
2592 /* Output the assembly language code for the function FNDECL
2593 from its DECL_SAVED_INSNS. Used for inline functions that are output
2594 at the end of compilation instead of where they appeared in the source. */
2596 void
2597 output_inline_function (fndecl)
2598 tree fndecl;
2600 rtx head = DECL_SAVED_INSNS (fndecl);
2601 rtx last;
2602 extern rtx stack_slot_list;
2604 temporary_allocation ();
2606 current_function_decl = fndecl;
2608 /* This call is only used to initialize global variables. */
2609 init_function_start (fndecl);
2611 /* Set stack frame size. */
2612 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl));
2614 restore_reg_data (FIRST_PARM_INSN (head));
2616 stack_slot_list = XEXP (head, 9);
2618 /* There is no need to output a return label again. */
2619 return_label = 0;
2620 expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));
2622 /* Find last insn and rebuild the constant pool. */
2623 for (last = FIRST_PARM_INSN (head);
2624 NEXT_INSN (last); last = NEXT_INSN (last))
2626 #if 0
2627 /* No need to restore these constants again. */
2628 if (GET_CODE (last) == INSN || GET_CODE (last) == JUMP_INSN
2629 || GET_CODE (last) == CALL_INSN)
2630 restore_constants (PATTERN (last));
2631 #endif
2634 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
2636 /* Compile this function all the way down to assembly code. */
2637 rest_of_compilation (fndecl);
2639 current_function_decl = 0;
2641 permanent_allocation ();
2643 #if 0
2645 /* Hashing of rtxs so that we don't make duplicates.
2646 The entry point is `rtx_hash_canon'. */
2648 /* Each hash table slot is a bucket containing a chain
2649 of these structures. */
2651 struct rtx_hash
2653 struct rtx_hash *next; /* Next structure in the bucket. */
2654 int hashcode; /* Hash code of this rtx. */
2655 rtx x; /* The rtx recorded here. */
2658 /* Now here is the hash table. This works exactly the same way
2659 that types are hashed in tree.c, except this is for rtxs. */
2661 #define RTX_HASH_SIZE 199
2662 struct rtx_hash *rtx_hash_table[RTX_HASH_SIZE];
2664 /* Here is how primitive or already-canonicalized rtxs' hash
2665 codes are made. */
2666 #define RTX_HASH(RTX) (RTX)
2668 /* Look in the rtx hash table for an rtx isomorphic to X.
2669 If one is found, return it. Otherwise return 0. */
2671 rtx
2672 rtx_hash_lookup (hashcode, x)
2673 int hashcode;
2674 rtx x;
2676 register struct rtx_hash *h;
2677 for (h = rtx_hash_table[hashcode % RTX_HASH_SIZE]; h; h = h->next)
2678 if (h->hashcode == hashcode
2679 && GET_CODE (h->x) == GET_CODE (x)
2680 && GET_MODE (h->x) == GET_MODE (x)
2681 #if 0
2682 && h->x->jump == x->jump
2683 && h->x->call == x->call
2684 && h->x->unchanging == x->unchanging
2685 && h->x->volatil == x->volatil
2686 && h->x->in_struct == x->in_struct
2687 && h->x->used == x->used
2688 && h->x->integrated == x->integrated
2689 #endif
2692 int i, j;
2693 int len = GET_RTX_LENGTH (GET_CODE (x));
2694 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
2696 for (i = 0; i < len; i++)
2697 switch (fmt[i])
2699 case '0':
2700 break;
2702 case 'e':
2703 case 'u':
2704 case 's':
2705 case 'S':
2706 if (XEXP (h->x, i) != XEXP (x, i))
2707 goto no_dice;
2708 break;
2710 case 'E':
2711 if (XVECLEN (h->x, i) != XVECLEN (x, i))
2712 goto no_dice;
2713 for (j = 0; j < XVECLEN (x, i); j++)
2714 if (XVECEXP (h->x, i, j) != XVECEXP (x, i, j))
2715 goto no_dice;
2716 break;
2718 case 'i':
2719 case 'n':
2720 if (XINT (h->x, i) != XINT (x, i))
2721 goto no_dice;
break;
2723 default:
2724 abort ();
2727 /* Everything matched. */
2728 return h->x;
2730 /* Try more. */
2731 no_dice:
2735 /* Nothing matched. */
2736 return 0;
2739 /* Add an entry to the rtx hash table
2740 for an rtx X whose hash code is HASHCODE. */
2742 void
2743 rtx_hash_add (hashcode, x)
2744 int hashcode;
2745 rtx x;
2747 register struct rtx_hash *h;
2749 h = (struct rtx_hash *) oballoc (sizeof (struct rtx_hash));
2750 h->hashcode = hashcode;
2751 h->x = x;
2752 h->next = rtx_hash_table[hashcode % RTX_HASH_SIZE];
2753 rtx_hash_table[hashcode % RTX_HASH_SIZE] = h;
2756 /* Given an rtx X, and HASHCODE its hash code, return the canonical
2757 object for an identical rtx if one already exists.
2758 Otherwise, return X, and record it as the canonical object
2759 if it is a permanent object.
2761 To use this function, first create an rtx of the sort you want.
2762 Then compute its hash code from the fields of the rtx that
2763 make it different from other similar rtxs.
2764 Then call this function and use the value.
2765 This function frees the rtx you pass in if it is a duplicate. */
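/* Sketch of the intended protocol (hypothetical caller; this whole
   hashing section is disabled by the surrounding #if 0).  */
static rtx
rtx_hash_canon_example (x)
     rtx x;
{
  /* X must live on maybepermanent_obstack; rtx_hash_canon frees it
     if an isomorphic rtx was already recorded and returns the twin.  */
  return rtx_hash_canon ((int) RTX_HASH (x), x);
}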
2767 /* Set to 1 to debug without canonicalization. Never set by program. */
2768 int debug_no_rtx_hash = 0;
2770 rtx
2771 rtx_hash_canon (hashcode, x)
2772 int hashcode;
2773 rtx x;
2775 rtx x1;
2777 if (debug_no_rtx_hash)
2778 return x;
2780 x1 = rtx_hash_lookup (hashcode, x);
2781 if (x1 != 0)
2783 struct obstack *o = maybepermanent_obstack;
2784 obstack_free (o, x);
2785 return x1;
2788 /* If this is a new rtx, record it for later reuse. */
2789 rtx_hash_add (hashcode, x);
2791 return x;
2793 #endif