1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
36 #include "insn-attr.h"
45 #include "target-def.h"
/* Forward declarations for the static functions that implement the
   target hooks installed below.  NOTE(review): this file appears to be
   a garbled extraction -- original lines are split mid-token and some
   are missing (e.g. the tail of the vax_output_mi_thunk prototype and
   original line 51); confirm against the pristine source.  */
47 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT
);
48 static void vax_file_start (void);
49 static void vax_init_libfuncs (void);
50 static void vax_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
52 static int vax_address_cost_1 (rtx
);
53 static int vax_address_cost (rtx
);
54 static bool vax_rtx_costs (rtx
, int, int, int *);
55 static rtx
vax_struct_value_rtx (tree
, int);
57 /* Initialize the GCC target structure. */
/* Each pair below first #undef's the generic default from
   target-def.h and then points the hook at the VAX implementation
   defined in this file.  */
58 #undef TARGET_ASM_ALIGNED_HI_OP
59 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
61 #undef TARGET_ASM_FUNCTION_PROLOGUE
62 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
64 #undef TARGET_ASM_FILE_START
65 #define TARGET_ASM_FILE_START vax_file_start
66 #undef TARGET_ASM_FILE_START_APP_OFF
67 #define TARGET_ASM_FILE_START_APP_OFF true
69 #undef TARGET_INIT_LIBFUNCS
70 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
72 #undef TARGET_ASM_OUTPUT_MI_THUNK
73 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
74 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
75 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
77 #undef TARGET_DEFAULT_TARGET_FLAGS
78 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
80 #undef TARGET_RTX_COSTS
81 #define TARGET_RTX_COSTS vax_rtx_costs
82 #undef TARGET_ADDRESS_COST
83 #define TARGET_ADDRESS_COST vax_address_cost
85 #undef TARGET_PROMOTE_PROTOTYPES
86 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
88 #undef TARGET_STRUCT_VALUE_RTX
89 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
/* The target vtable: TARGET_INITIALIZER collects every TARGET_* macro
   redefined above into this single structure used by the compiler.  */
91 struct gcc_target targetm
= TARGET_INITIALIZER
;
93 /* Set global variables as needed for the options enabled. */
/* NOTE(review): the return type/opening brace and the guard that
   normally precedes the assignment below (presumably a TARGET_G_FLOAT
   check -- original line 99) are missing from this extraction; as
   written the G_float format would be selected unconditionally.
   Confirm against the original before compiling.  */
96 override_options (void)
98 /* We're VAX floating point, not IEEE floating point. */
/* Install the VAX G_float layout as the representation of DFmode.  */
100 REAL_MODE_FORMAT (DFmode
) = &vax_g_format
;
103 /* Generate the assembly code for function entry. FILE is a stdio
104 stream to output the code to. SIZE is an int: how many units of
105 temporary storage to allocate.
107 Refer to the array `regs_ever_live' to determine which registers to
108 save; `regs_ever_live[I]' is nonzero if register number I is ever
109 used in the function. This function is responsible for knowing
110 which registers should not be saved even if used. */
/* NOTE(review): garbled extraction -- the opening brace, the local
   declarations (regno, mask, offset) and the statement bodies that
   build the register-save mask (original lines ~114-121, 125, 127-128,
   132, 137-138, 140, 142) are missing; confirm against the original.  */
113 vax_output_function_prologue (FILE * file
, HOST_WIDE_INT size
)
/* Scan the hard registers; the missing loop body presumably ORs each
   live callee-saved register into MASK -- TODO confirm.  */
118 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
119 if (regs_ever_live
[regno
] && !call_used_regs
[regno
])
/* Emit the VAX procedure entry mask word.  */
122 fprintf (file
, "\t.word 0x%x\n", mask
);
/* Emit dwarf2 call-frame info: each saved register, then PC, FP and
   AP, stepping OFFSET down 4 bytes per saved longword, and finally
   define the CFA relative to the frame pointer.  */
124 if (dwarf2out_do_frame ())
126 const char *label
= dwarf2out_cfi_label ();
129 for (regno
= FIRST_PSEUDO_REGISTER
-1; regno
>= 0; --regno
)
130 if (regs_ever_live
[regno
] && !call_used_regs
[regno
])
131 dwarf2out_reg_save (label
, regno
, offset
-= 4);
133 dwarf2out_reg_save (label
, PC_REGNUM
, offset
-= 4);
134 dwarf2out_reg_save (label
, FRAME_POINTER_REGNUM
, offset
-= 4);
135 dwarf2out_reg_save (label
, ARG_POINTER_REGNUM
, offset
-= 4);
136 dwarf2out_def_cfa (label
, FRAME_POINTER_REGNUM
, -(offset
- 4));
/* Allocate the local frame, either with movab or subl2; the condition
   selecting between the two forms is missing from this extraction.  */
139 size
-= STARTING_FRAME_OFFSET
;
141 asm_fprintf (file
, "\tmovab %wd(%Rsp),%Rsp\n", -size
);
143 asm_fprintf (file
, "\tsubl2 $%wd,%Rsp\n", size
);
146 /* When debugging with stabs, we want to output an extra dummy label
147 so that gas can distinguish between D_float and G_float prior to
148 processing the .stabs directive identifying type double. */
150 vax_file_start (void)
152 default_file_start ();
154 if (write_symbols
== DBX_DEBUG
)
155 fprintf (asm_out_file
, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR
);
158 /* We can use the BSD C library routines for the libgcc calls that are
159 still generated, since that's what they boil down to anyways. When
160 ELF, avoid the user's namespace. */
163 vax_init_libfuncs (void)
165 set_optab_libfunc (udiv_optab
, SImode
, TARGET_ELF
? "*__udiv" : "*udiv");
166 set_optab_libfunc (umod_optab
, SImode
, TARGET_ELF
? "*__urem" : "*urem");
169 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
/* Split each DImode operand of INSN into a high word (left in
   OPERANDS[i]) and a low word (stored in LOW[i]).  NOTE(review):
   garbled extraction -- the trailing parameters of the signature
   (presumably `rtx *low, int n') and the bodies of the first loop and
   several braces are missing; confirm against the original.  */
172 split_quadword_operands (rtx insn
, enum rtx_code code
, rtx
* operands
,
177 for (i
= 0; i
< n
; i
++)
180 for (i
= 0; i
< n
; i
++)
/* Auto-inc/dec addresses: both halves are accessed through the same
   side-effect address expression.  */
182 if (MEM_P (operands
[i
])
183 && (GET_CODE (XEXP (operands
[i
], 0)) == PRE_DEC
184 || GET_CODE (XEXP (operands
[i
], 0)) == POST_INC
))
186 rtx addr
= XEXP (operands
[i
], 0);
187 operands
[i
] = low
[i
] = gen_rtx_MEM (SImode
, addr
);
/* When optimizing for size and the base register dies in this insn,
   rewrite (reg) as a POST_INC access for the low word so the high
   word can reuse the incremented register.  */
189 else if (optimize_size
&& MEM_P (operands
[i
])
190 && REG_P (XEXP (operands
[i
], 0))
191 && (code
!= MINUS
|| operands
[1] != const0_rtx
)
192 && find_regno_note (insn
, REG_DEAD
,
193 REGNO (XEXP (operands
[i
], 0))))
195 low
[i
] = gen_rtx_MEM (SImode
,
196 gen_rtx_POST_INC (Pmode
,
197 XEXP (operands
[i
], 0)));
198 operands
[i
] = gen_rtx_MEM (SImode
, XEXP (operands
[i
], 0));
/* Default: take the two SImode subwords of the DImode operand.  */
202 low
[i
] = operand_subword (operands
[i
], 0, 0, DImode
);
203 operands
[i
] = operand_subword (operands
[i
], 1, 0, DImode
);
/* Return the assembler name of the hard register that REG currently
   lives in, consulting reg_renumber for pseudos.  NOTE(review): the
   opening brace and the declaration/initialization of `regno'
   (presumably `int regno = REGNO (reg);' -- original lines 210-212)
   are missing from this extraction.  */
209 register_name (rtx reg
)
213 if (regno
>= FIRST_PSEUDO_REGISTER
)
214 regno
= reg_renumber
[regno
];
/* A pseudo that was not assigned a hard register maps to -1.  */
215 gcc_assert (regno
>= 0);
216 return reg_names
[regno
];
/* Print a VAX assembler representation of the address ADDR to FILE.
   NOTE(review): garbled extraction -- case labels, braces, the
   `offset' declaration and many lines (e.g. originals 221-226,
   228-230, 289-299, 362-408) are missing; the remaining text matches
   the classic GCC VAX print_operand_address but cannot be rebuilt
   safely from this copy alone.  */
220 print_operand_address (FILE * file
, rtx addr
)
223 rtx reg1
, breg
, ireg
;
227 switch (GET_CODE (addr
))
231 addr
= XEXP (addr
, 0);
/* (reg): register deferred.  */
235 fprintf (file
, "(%s)", register_name (addr
));
/* -(reg): autodecrement.  */
239 fprintf (file
, "-(%s)", register_name (XEXP (addr
, 0)));
/* (reg)+: autoincrement.  */
243 fprintf (file
, "(%s)+", register_name (XEXP (addr
, 0)));
247 /* There can be either two or three things added here. One must be a
248 REG. One can be either a REG or a MULT of a REG and an appropriate
249 constant, and the third can only be a constant or a MEM.
251 We get these two or three things and put the constant or MEM in
252 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
253 a register and can't tell yet if it is a base or index register,
256 reg1
= 0; ireg
= 0; breg
= 0; offset
= 0;
258 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
259 || MEM_P (XEXP (addr
, 0)))
261 offset
= XEXP (addr
, 0);
262 addr
= XEXP (addr
, 1);
264 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
265 || MEM_P (XEXP (addr
, 1)))
267 offset
= XEXP (addr
, 1);
268 addr
= XEXP (addr
, 0);
270 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
272 ireg
= XEXP (addr
, 1);
273 addr
= XEXP (addr
, 0);
275 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
277 ireg
= XEXP (addr
, 0);
278 addr
= XEXP (addr
, 1);
280 else if (REG_P (XEXP (addr
, 1)))
282 reg1
= XEXP (addr
, 1);
283 addr
= XEXP (addr
, 0);
285 else if (REG_P (XEXP (addr
, 0)))
287 reg1
= XEXP (addr
, 0);
288 addr
= XEXP (addr
, 1);
/* Second level of the PLUS: classify the remaining summand.  */
300 else if (GET_CODE (addr
) == MULT
)
304 gcc_assert (GET_CODE (addr
) == PLUS
);
305 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
306 || MEM_P (XEXP (addr
, 0)))
310 if (CONST_INT_P (offset
))
311 offset
= plus_constant (XEXP (addr
, 0), INTVAL (offset
));
314 gcc_assert (CONST_INT_P (XEXP (addr
, 0)));
315 offset
= plus_constant (offset
, INTVAL (XEXP (addr
, 0)));
318 offset
= XEXP (addr
, 0);
320 else if (REG_P (XEXP (addr
, 0)))
/* Two plain registers: the earlier one becomes the index.  */
323 ireg
= reg1
, breg
= XEXP (addr
, 0), reg1
= 0;
325 reg1
= XEXP (addr
, 0);
329 gcc_assert (GET_CODE (XEXP (addr
, 0)) == MULT
);
331 ireg
= XEXP (addr
, 0);
334 if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
335 || MEM_P (XEXP (addr
, 1)))
339 if (CONST_INT_P (offset
))
340 offset
= plus_constant (XEXP (addr
, 1), INTVAL (offset
));
343 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
344 offset
= plus_constant (offset
, INTVAL (XEXP (addr
, 1)));
347 offset
= XEXP (addr
, 1);
349 else if (REG_P (XEXP (addr
, 1)))
352 ireg
= reg1
, breg
= XEXP (addr
, 1), reg1
= 0;
354 reg1
= XEXP (addr
, 1);
358 gcc_assert (GET_CODE (XEXP (addr
, 1)) == MULT
);
360 ireg
= XEXP (addr
, 1);
364 /* If REG1 is nonzero, figure out if it is a base or index register. */
368 || (flag_pic
&& GET_CODE (addr
) == SYMBOL_REF
)
371 || (flag_pic
&& symbolic_operand (offset
, SImode
)))))
/* PIC restriction: a symbolic offset cannot be combined with both a
   base and an index register.  */
382 if (flag_pic
&& symbolic_operand (offset
, SImode
))
387 output_operand_lossage ("symbol used with both base and indexed registers");
390 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
391 if (flag_pic
> 1 && GET_CODE (offset
) == CONST
392 && GET_CODE (XEXP (XEXP (offset
, 0), 0)) == SYMBOL_REF
393 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset
, 0), 0)))
396 output_operand_lossage ("symbol with offset used in PIC mode");
400 /* symbol(reg) isn't PIC, but symbol[reg] is. */
/* Emit the pieces: displacement, (base), then [index].  */
409 output_address (offset
);
413 fprintf (file
, "(%s)", register_name (breg
));
417 if (GET_CODE (ireg
) == MULT
)
418 ireg
= XEXP (ireg
, 0);
419 gcc_assert (REG_P (ireg
));
420 fprintf (file
, "[%s]", register_name (ireg
));
/* Fallback: a bare constant address.  */
425 output_addr_const (file
, addr
);
/* Print operand X with modifier CODE to FILE.  The single-letter
   modifiers each transform a CONST_INT for a specific VAX instruction
   idiom (see the branches below).  NOTE(review): garbled extraction --
   the opening brace, the first `if' of the chain (code == '#',
   original lines ~431-432), the `dstr' declarations and several
   braces are missing; do not compile as-is.  */
430 print_operand (FILE *file
, rtx x
, int code
)
433 fputc (ASM_DOUBLE_CHAR
, file
);
434 else if (code
== '|')
435 fputs (REGISTER_PREFIX
, file
);
/* %C: reversed condition name of comparison X.  */
436 else if (code
== 'C')
437 fputs (rev_cond_name (x
), file
);
438 else if (code
== 'D' && CONST_INT_P (x
) && INTVAL (x
) < 0)
439 fprintf (file
, "$" NEG_HWI_PRINT_HEX16
, INTVAL (x
));
/* %P: value plus one (for aob-style counts).  */
440 else if (code
== 'P' && CONST_INT_P (x
))
441 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + 1);
/* %N: one's complement of the value.  */
442 else if (code
== 'N' && CONST_INT_P (x
))
443 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, ~ INTVAL (x
));
444 /* rotl instruction cannot deal with negative arguments. */
445 else if (code
== 'R' && CONST_INT_P (x
))
446 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, 32 - INTVAL (x
));
447 else if (code
== 'H' && CONST_INT_P (x
))
448 fprintf (file
, "$%d", (int) (0xffff & ~ INTVAL (x
)));
449 else if (code
== 'h' && CONST_INT_P (x
))
450 fprintf (file
, "$%d", (short) - INTVAL (x
));
451 else if (code
== 'B' && CONST_INT_P (x
))
452 fprintf (file
, "$%d", (int) (0xff & ~ INTVAL (x
)));
453 else if (code
== 'b' && CONST_INT_P (x
))
454 fprintf (file
, "$%d", (int) (0xff & - INTVAL (x
)));
/* %M: mask of INTVAL(x) low bits, complemented (for bit-field ops).  */
455 else if (code
== 'M' && CONST_INT_P (x
))
456 fprintf (file
, "$%d", ~((1 << INTVAL (x
)) - 1));
/* Plain operands: register, memory, float constants, or symbolic.  */
458 fprintf (file
, "%s", register_name (x
));
460 output_address (XEXP (x
, 0));
461 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
464 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
465 sizeof (dstr
), 0, 1);
466 fprintf (file
, "$0f%s", dstr
);
468 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
471 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
472 sizeof (dstr
), 0, 1);
473 fprintf (file
, "$0%c%s", ASM_DOUBLE_CHAR
, dstr
);
/* Under strict PIC a symbol cannot be used as an immediate.  */
477 if (flag_pic
> 1 && symbolic_operand (x
, SImode
))
480 output_operand_lossage ("symbol used as immediate operand");
483 output_addr_const (file
, x
);
/* Return the assembler condition-name string for the reverse of
   comparison OP.  NOTE(review): the entire switch body (original
   lines ~491-516) is missing from this extraction -- only the header
   and switch statement survive.  */
488 rev_cond_name (rtx op
)
490 switch (GET_CODE (op
))
/* Return nonzero if constant C can be used as a short floating-point
   literal operand on the VAX.  NOTE(review): garbled extraction --
   the return statements, the assignment of `mode' and `x', and loop
   internals (originals ~520, 523-529, 533-534, 536, 538-542, 544-552)
   are missing; presumably it checks C against small powers of two and
   their exact inverses -- confirm against the original.  */
519 vax_float_literal(rtx c
)
521 enum machine_mode mode
;
522 REAL_VALUE_TYPE r
, s
;
525 if (GET_CODE (c
) != CONST_DOUBLE
)
/* 0.0, 1.0 and 2.0 are always available as literals.  */
530 if (c
== const_tiny_rtx
[(int) mode
][0]
531 || c
== const_tiny_rtx
[(int) mode
][1]
532 || c
== const_tiny_rtx
[(int) mode
][2])
535 REAL_VALUE_FROM_CONST_DOUBLE (r
, c
);
/* Try small integer values (and their inverses) for equality.  */
537 for (i
= 0; i
< 7; i
++)
541 REAL_VALUE_FROM_INT (s
, x
, 0, mode
);
543 if (REAL_VALUES_EQUAL (r
, s
))
545 ok
= exact_real_inverse (mode
, &s
);
547 if (REAL_VALUES_EQUAL (r
, s
))
554 /* Return the cost in cycles of a memory address, relative to register
557 Each of the following adds the indicated number of cycles:
561 1 - indexing and/or offset(register)
/* NOTE(review): garbled extraction -- several case labels, the
   `restart' label and the decomposition of plus_op0/plus_op1
   (originals ~555-556, 558-560, 562-565, 567, 570, 572-580, 582-583,
   585, 587-589, 591, 593-597, 599, 602-603, 606-625) are missing.  */
566 vax_address_cost_1 (rtx addr
)
568 int reg
= 0, indexed
= 0, indir
= 0, offset
= 0, predec
= 0;
569 rtx plus_op0
= 0, plus_op1
= 0;
571 switch (GET_CODE (addr
))
581 indexed
= 1; /* 2 on VAX 2 */
584 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
586 offset
= (unsigned HOST_WIDE_INT
)(INTVAL(addr
)+128) > 256;
590 offset
= 1; /* 2 on VAX 2 */
592 case LABEL_REF
: /* this is probably a byte offset from the pc */
598 plus_op1
= XEXP (addr
, 0);
600 plus_op0
= XEXP (addr
, 0);
601 addr
= XEXP (addr
, 1);
604 indir
= 2; /* 3 on VAX 2 */
605 addr
= XEXP (addr
, 0);
611 /* Up to 3 things can be added in an address. They are stored in
612 plus_op0, plus_op1, and addr. */
626 /* Indexing and register+offset can both be used (except on a VAX 2)
627 without increasing execution time over either one alone. */
628 if (reg
&& indexed
&& offset
)
629 return reg
+ indir
+ offset
+ predec
;
630 return reg
+ indexed
+ indir
+ offset
+ predec
;
634 vax_address_cost (rtx x
)
636 return (1 + (REG_P (x
) ? 0 : vax_address_cost_1 (x
)));
639 /* Cost of an expression on a VAX. This version has costs tuned for the
640 CVAX chip (found in the VAX 3 series) with comments for variations on
643 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
644 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
645 costs on a per cpu basis. */
/* NOTE(review): garbled extraction -- the opening brace, the case
   labels of the big switch, many `return' statements and braces
   (large runs of original lines, e.g. 653-666, 698-792 gaps) are
   missing; the surviving text matches the classic vax_rtx_costs but
   cannot be compiled from this copy.  */
648 vax_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
650 enum machine_mode mode
= GET_MODE (x
);
651 int i
= 0; /* may be modified in switch */
652 const char *fmt
= GET_RTX_FORMAT (code
); /* may be modified in switch */
656 /* On a VAX, constants from 0..63 are cheap because they can use the
657 1 byte literal constant format. Compare to -1 should be made cheap
658 so that decrement-and-branch insns can be formed more easily (if
659 the value -1 is copied to a register some decrement-and-branch
660 patterns will not match). */
667 if (outer_code
== AND
)
669 *total
= ((unsigned HOST_WIDE_INT
) ~INTVAL (x
) <= 077) ? 1 : 2;
672 if ((unsigned HOST_WIDE_INT
) INTVAL (x
) <= 077
673 || (outer_code
== COMPARE
675 || ((outer_code
== PLUS
|| outer_code
== MINUS
)
676 && (unsigned HOST_WIDE_INT
) -INTVAL (x
) <= 077))
/* CONST_DOUBLE: float literals are cheap only when encodable.  */
690 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
691 *total
= vax_float_literal (x
) ? 5 : 8;
693 *total
= ((CONST_DOUBLE_HIGH (x
) == 0
694 && (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
) < 64)
695 || (outer_code
== PLUS
696 && CONST_DOUBLE_HIGH (x
) == -1
697 && (unsigned HOST_WIDE_INT
)-CONST_DOUBLE_LOW (x
) < 64))
703 return true; /* Implies register operand. */
707 return true; /* Implies register operand. */
713 *total
= 16; /* 4 on VAX 9000 */
716 *total
= 9; /* 4 on VAX 9000, 12 on VAX 2 */
719 *total
= 16; /* 6 on VAX 9000, 28 on VAX 2 */
724 *total
= 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
727 *total
= MAX_COST
; /* Mode is not supported. */
735 *total
= MAX_COST
; /* Mode is not supported. */
743 *total
= 30; /* Highly variable. */
744 else if (mode
== DFmode
)
745 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
748 *total
= 11; /* 25 on VAX 2 */
758 *total
= MAX_COST
; /* Mode is not supported. */
765 *total
= (6 /* 4 on VAX 9000 */
766 + (mode
== DFmode
) + (GET_MODE (XEXP (x
, 0)) != SImode
));
770 *total
= 7; /* 17 on VAX 2 */
779 *total
= 10; /* 6 on VAX 9000 */
784 *total
= 6; /* 5 on VAX 2, 4 on VAX 9000 */
785 if (CONST_INT_P (XEXP (x
, 1)))
786 fmt
= "e"; /* all constant rotate counts are short */
791 *total
= (mode
== DFmode
) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
792 /* Small integer operands can use subl2 and addl2. */
793 if ((CONST_INT_P (XEXP (x
, 1)))
794 && (unsigned HOST_WIDE_INT
)(INTVAL (XEXP (x
, 1)) + 63) < 127)
804 /* AND is special because the first operand is complemented. */
806 if (CONST_INT_P (XEXP (x
, 0)))
808 if ((unsigned HOST_WIDE_INT
)~INTVAL (XEXP (x
, 0)) > 63)
818 else if (mode
== SFmode
)
820 else if (mode
== DImode
)
/* MEM: base cost depends on width, plus the addressing-mode cost.  */
836 if (mode
== DImode
|| mode
== DFmode
)
837 *total
= 5; /* 7 on VAX 2 */
839 *total
= 3; /* 4 on VAX 2 */
841 if (!REG_P (x
) && GET_CODE (x
) != POST_INC
)
842 *total
+= vax_address_cost_1 (x
);
848 *total
= 3; /* FIXME: Costs need to be checked */
855 /* Now look inside the expression. Operands which are not registers or
856 short constants add to the cost.
858 FMT and I may have been adjusted in the switch above for instructions
859 which require special handling. */
861 while (*fmt
++ == 'e')
863 rtx op
= XEXP (x
, i
);
866 code
= GET_CODE (op
);
868 /* A NOT is likely to be found as the first operand of an AND
869 (in which case the relevant cost is of the operand inside
870 the not) and not likely to be found anywhere else. */
872 op
= XEXP (op
, 0), code
= GET_CODE (op
);
877 if ((unsigned HOST_WIDE_INT
)INTVAL (op
) > 63
878 && GET_MODE (x
) != QImode
)
879 *total
+= 1; /* 2 on VAX 2 */
884 *total
+= 1; /* 2 on VAX 2 */
887 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
)
889 /* Registers are faster than floating point constants -- even
890 those constants which can be encoded in a single byte. */
891 if (vax_float_literal (op
))
894 *total
+= (GET_MODE (x
) == DFmode
) ? 3 : 2;
898 if (CONST_DOUBLE_HIGH (op
) != 0
899 || (unsigned HOST_WIDE_INT
)CONST_DOUBLE_LOW (op
) > 63)
904 *total
+= 1; /* 2 on VAX 2 */
905 if (!REG_P (XEXP (op
, 0)))
906 *total
+= vax_address_cost_1 (XEXP (op
, 0));
919 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
920 Used for C++ multiple inheritance.
921 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
922 addl2 $DELTA, 4(ap) #adjust first argument
923 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
927 vax_output_mi_thunk (FILE * file
,
928 tree thunk ATTRIBUTE_UNUSED
,
930 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
933 fprintf (file
, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC
, delta
);
934 asm_fprintf (file
, ",4(%Rap)\n");
935 fprintf (file
, "\tjmp ");
936 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
937 fprintf (file
, "+2\n");
941 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
942 int incoming ATTRIBUTE_UNUSED
)
944 return gen_rtx_REG (Pmode
, VAX_STRUCT_VALUE_REGNUM
);
947 /* Worker function for NOTICE_UPDATE_CC. */
/* Track what the condition codes hold after EXP executes, recording
   the compared values in cc_status.  NOTE(review): garbled extraction
   -- the function header line, braces, CC_STATUS_INIT calls and the
   switch case labels (e.g. originals 948-951, 953, 955, 958-959,
   967-981, 984-985, 988, 990, 992-993, 996-1003, 1005, 1009) are
   missing; confirm against the original.  */
950 vax_notice_update_cc (rtx exp
, rtx insn ATTRIBUTE_UNUSED
)
952 if (GET_CODE (exp
) == SET
)
954 if (GET_CODE (SET_SRC (exp
)) == CALL
)
956 else if (GET_CODE (SET_DEST (exp
)) != ZERO_EXTRACT
957 && GET_CODE (SET_DEST (exp
)) != PC
)
960 /* The integer operations below don't set carry or
961 set it in an incompatible way. That's ok though
962 as the Z bit is all we need when doing unsigned
963 comparisons on the result of these insns (since
964 they're always with 0). Set CC_NO_OVERFLOW to
965 generate the correct unsigned branches. */
966 switch (GET_CODE (SET_SRC (exp
)))
969 if (GET_MODE_CLASS (GET_MODE (exp
)) == MODE_FLOAT
)
977 cc_status
.flags
= CC_NO_OVERFLOW
;
/* Remember the operands the flags now describe.  */
982 cc_status
.value1
= SET_DEST (exp
);
983 cc_status
.value2
= SET_SRC (exp
);
986 else if (GET_CODE (exp
) == PARALLEL
987 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
989 if (GET_CODE (SET_SRC (XVECEXP (exp
, 0, 0))) == CALL
)
991 else if (GET_CODE (SET_DEST (XVECEXP (exp
, 0, 0))) != PC
)
994 cc_status
.value1
= SET_DEST (XVECEXP (exp
, 0, 0));
995 cc_status
.value2
= SET_SRC (XVECEXP (exp
, 0, 0));
998 /* PARALLELs whose first element sets the PC are aob,
999 sob insns. They do change the cc's. */
/* Invalidate value2 when it may alias the register or memory that
   value1 just clobbered.  */
1004 if (cc_status
.value1
&& REG_P (cc_status
.value1
)
1006 && reg_overlap_mentioned_p (cc_status
.value1
, cc_status
.value2
))
1007 cc_status
.value2
= 0;
1008 if (cc_status
.value1
&& MEM_P (cc_status
.value1
)
1010 && MEM_P (cc_status
.value2
))
1011 cc_status
.value2
= 0;
1012 /* Actual condition, one line up, should be that value2's address
1013 depends on value1, but that is too much of a pain. */
1016 /* Output integer move instructions. */
/* Return the assembler template for an integer move of width MODE;
   OPERANDS[0] is the destination, OPERANDS[1] the source, and
   OPERANDS[2] may be clobbered as a scratch immediate.  NOTE(review):
   garbled extraction -- the switch-on-mode skeleton, many braces,
   `hi'/`lo'/`n' declarations and numerous lines (e.g. originals
   1021-1027, 1029, 1033-1036, 1038, 1040, 1044-1046, 1048-1050, 1055,
   1058-1060, 1065-1066, 1069, 1071-1075, 1079, 1081, 1084, 1087,
   1091-1095, 1103, 1106, 1108, 1111, 1115, 1121, 1124, 1129-1132,
   1137-1139, 1141-1142, 1144, 1148-1149, 1151, 1153-1156, 1159, 1161,
   1168, 1171, 1175, 1180, 1182, 1184-1185, 1187, 1189-1190, 1199,
   1201-1202, 1204, 1206-1207, 1210) are missing.  */
1019 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
,
1020 enum machine_mode mode
)
1023 const char *pattern_hi
, *pattern_lo
;
1028 if (operands
[1] == const0_rtx
)
/* DImode with -Os: try to express the 64-bit constant compactly.  */
1030 if (TARGET_QMATH
&& optimize_size
1031 && (CONST_INT_P (operands
[1])
1032 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1034 unsigned HOST_WIDE_INT hval
, lval
;
1037 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1039 gcc_assert (HOST_BITS_PER_WIDE_INT
!= 64);
1041 /* Make sure only the low 32 bits are valid. */
1042 lval
= CONST_DOUBLE_LOW (operands
[1]) & 0xffffffff;
1043 hval
= CONST_DOUBLE_HIGH (operands
[1]) & 0xffffffff;
1047 lval
= INTVAL (operands
[1]);
1051 /* Here we see if we are trying to see if the 64bit value is really
1052 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1053 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1054 8 bytes - 1 shift byte - 1 short literal byte. */
1056 && (n
= exact_log2 (lval
& (- lval
))) != -1
1057 && (lval
>> n
) < 64)
1061 #if HOST_BITS_PER_WIDE_INT == 32
1062 /* On 32bit platforms, if the 6bits didn't overflow into the
1063 upper 32bit value that value better be 0. If we have
1064 overflowed, make sure it wasn't too much. */
1067 if (n
<= 26 || hval
>= ((unsigned)1 << (n
- 26)))
1068 n
= 0; /* failure */
1070 lval
|= hval
<< (32 - n
);
1073 /* If n is 0, then ashq is not the best way to emit this. */
1076 operands
[1] = GEN_INT (lval
);
1077 operands
[2] = GEN_INT (n
);
1078 return "ashq %2,%1,%0";
1080 #if HOST_BITS_PER_WIDE_INT == 32
1082 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1083 upper 32bit value. */
1085 && (n
= exact_log2 (hval
& (- hval
)) - 1) != -1
1086 && (hval
>> n
) < 64)
1088 operands
[1] = GEN_INT (hval
>> n
);
1089 operands
[2] = GEN_INT (n
+ 32);
1090 return "ashq %2,%1,%0";
/* Otherwise split the quadword and emit two SImode moves.  */
1096 && (!MEM_P (operands
[0])
1097 || GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
1098 || GET_CODE (XEXP (operands
[0], 0)) == POST_INC
1099 || !illegal_addsub_di_memory_operand (operands
[0], DImode
))
1100 && ((GET_CODE (operands
[1]) == CONST_INT
1101 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1102 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1104 hi
[0] = operands
[0];
1105 hi
[1] = operands
[1];
1107 split_quadword_operands(insn
, SET
, hi
, lo
, 2);
1109 pattern_lo
= vax_output_int_move (NULL
, lo
, SImode
);
1110 pattern_hi
= vax_output_int_move (NULL
, hi
, SImode
);
1112 /* The patterns are just movl/movl or pushl/pushl then a movq will
1113 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1114 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1116 if ((!strncmp (pattern_lo
, "movl", 4)
1117 && !strncmp (pattern_hi
, "movl", 4))
1118 || (!strncmp (pattern_lo
, "pushl", 5)
1119 && !strncmp (pattern_hi
, "pushl", 5)))
1120 return "movq %1,%0";
/* For a predecrement destination emit the high half first.  */
1122 if (MEM_P (operands
[0])
1123 && GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1125 output_asm_insn (vax_output_int_move (NULL
, hi
, SImode
), hi
);
1126 operands
[0] = lo
[0];
1127 operands
[1] = lo
[1];
1128 operands
[2] = lo
[2];
1133 output_asm_insn (vax_output_int_move (NULL
, lo
, SImode
), lo
);
1134 operands
[0] = hi
[0];
1135 operands
[1] = hi
[1];
1136 operands
[2] = hi
[2];
1140 return "movq %1,%0";
/* SImode: symbolic addresses use movab/pushab.  */
1143 if (symbolic_operand (operands
[1], SImode
))
1145 if (push_operand (operands
[0], SImode
))
1146 return "pushab %a1";
1147 return "movab %a1,%0";
1150 if (operands
[1] == const0_rtx
)
1152 if (push_operand (operands
[1], SImode
))
/* Large constants: pick the shortest encoding available.  */
1157 if (CONST_INT_P (operands
[1])
1158 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1160 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1162 if ((unsigned HOST_WIDE_INT
)(~i
) < 64)
1163 return "mcoml %N1,%0";
1164 if ((unsigned HOST_WIDE_INT
)i
< 0x100)
1165 return "movzbl %1,%0";
1166 if (i
>= -0x80 && i
< 0)
1167 return "cvtbl %1,%0";
1169 && (n
= exact_log2 (i
& (-i
))) != -1
1170 && ((unsigned HOST_WIDE_INT
)i
>> n
) < 64)
1172 operands
[1] = GEN_INT ((unsigned HOST_WIDE_INT
)i
>> n
);
1173 operands
[2] = GEN_INT (n
);
1174 return "ashl %2,%1,%0";
1176 if ((unsigned HOST_WIDE_INT
)i
< 0x10000)
1177 return "movzwl %1,%0";
1178 if (i
>= -0x8000 && i
< 0)
1179 return "cvtwl %1,%0";
1181 if (push_operand (operands
[0], SImode
))
1183 return "movl %1,%0";
/* HImode.  */
1186 if (CONST_INT_P (operands
[1]))
1188 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1191 else if ((unsigned HOST_WIDE_INT
)i
< 64)
1192 return "movw %1,%0";
1193 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1194 return "mcomw %H1,%0";
1195 else if ((unsigned HOST_WIDE_INT
)i
< 256)
1196 return "movzbw %1,%0";
1197 else if (i
>= -0x80 && i
< 0)
1198 return "cvtbw %1,%0";
1200 return "movw %1,%0";
/* QImode.  */
1203 if (CONST_INT_P (operands
[1]))
1205 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1208 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1209 return "mcomb %B1,%0";
1211 return "movb %1,%0";
1218 /* Output integer add instructions.
1220 The space-time-opcode tradeoffs for addition vary by model of VAX.
1222 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1223 but it not faster on other models.
1225 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1226 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1227 a register is used in an address too soon after it is set.
1228 Compromise by using movab only when it is shorter than the add
1229 or the base register in the address is one of sp, ap, and fp,
1230 which are not modified very often. */
/* Return the assembler template for an integer add of width MODE.
   NOTE(review): garbled extraction -- the switch-on-mode skeleton,
   the `low'/`sub'/`carry' declarations, braces and many lines
   (e.g. originals 1234-1239, 1241-1243, 1245-1246, 1248-1250, 1255,
   1261, 1264, 1267-1268, 1271, 1273-1275, 1278, 1280, 1282, 1287,
   1290-1291, 1294, 1300, 1302-1304, 1307, 1309-1310, 1312, 1318,
   1321-1323, 1325, 1327, 1329, 1342-1343, 1345, 1349-1350, 1357,
   1361, 1368, 1372-1373, 1375, 1379-1380, 1382, 1386-1387,
   1391-1392, 1395-1396, 1398-1399, 1401, 1403, 1405, 1410,
   1417-1418, 1420, 1422, 1424, 1429) are missing.  */
1233 vax_output_int_add (rtx insn
, rtx
*operands
, enum machine_mode mode
)
1240 const char *pattern
;
/* DImode path (currently disabled by the `&& 0').  */
1244 if (TARGET_QMATH
&& 0)
1247 split_quadword_operands (insn
, PLUS
, operands
, low
, 3);
1251 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1252 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1253 gcc_assert (!flag_pic
|| !external_memory_operand (low
[2], SImode
));
1254 gcc_assert (!flag_pic
|| !external_memory_operand (low
[0], SImode
));
1257 /* No reason to add a 0 to the low part and thus no carry, so just
1258 emit the appropriate add/sub instruction. */
1259 if (low
[2] == const0_rtx
)
1260 return vax_output_int_add (NULL
, operands
, SImode
);
1262 /* Are we doing addition or subtraction? */
1263 sub
= CONST_INT_P (operands
[2]) && INTVAL (operands
[2]) < 0;
1265 /* We can't use vax_output_int_add since some the patterns don't
1266 modify the carry bit. */
1269 if (low
[2] == constm1_rtx
)
1270 pattern
= "decl %0";
1272 pattern
= "subl2 $%n2,%0";
1276 if (low
[2] == const1_rtx
)
1277 pattern
= "incl %0";
1279 pattern
= "addl2 %2,%0";
1281 output_asm_insn (pattern
, low
);
1283 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1284 two 32bit parts, we complement each and then add one to
1285 low part. We know that the low part can't overflow since
1286 it's value can never be 0. */
1288 return "sbwc %N2,%0";
1289 return "adwc %2,%0";
1292 /* Add low parts. */
1293 if (rtx_equal_p (operands
[0], operands
[1]))
1295 if (low
[2] == const0_rtx
)
1296 /* Should examine operand, punt if not POST_INC. */
1297 pattern
= "tstl %0", carry
= 0;
1298 else if (low
[2] == const1_rtx
)
1299 pattern
= "incl %0";
1301 pattern
= "addl2 %2,%0";
1305 if (low
[2] == const0_rtx
)
1306 pattern
= "movl %1,%0", carry
= 0;
1308 pattern
= "addl3 %2,%1,%0";
1311 output_asm_insn (pattern
, low
);
1313 /* If CARRY is 0, we don't have any carry value to worry about. */
1314 return get_insn_template (CODE_FOR_addsi3
, insn
);
1315 /* %0 = C + %1 + %2 */
1316 if (!rtx_equal_p (operands
[0], operands
[1]))
1317 output_asm_insn ((operands
[1] == const0_rtx
1319 : "movl %1,%0"), operands
);
1320 return "adwc %2,%0";
/* SImode.  */
1324 if (rtx_equal_p (operands
[0], operands
[1]))
1326 if (operands
[2] == const1_rtx
)
1328 if (operands
[2] == constm1_rtx
)
1330 if (CONST_INT_P (operands
[2])
1331 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1332 return "subl2 $%n2,%0";
1333 if (CONST_INT_P (operands
[2])
1334 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1335 && REG_P (operands
[1])
1336 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1337 || REGNO (operands
[1]) > 11))
1338 return "movab %c2(%1),%0";
1339 if (REG_P (operands
[0]) && symbolic_operand (operands
[2], SImode
))
1340 return "movab %a2[%0],%0";
1341 return "addl2 %2,%0";
1344 if (rtx_equal_p (operands
[0], operands
[2]))
1346 if (REG_P (operands
[0]) && symbolic_operand (operands
[1], SImode
))
1347 return "movab %a1[%0],%0";
1348 return "addl2 %1,%0";
1351 if (CONST_INT_P (operands
[2])
1352 && INTVAL (operands
[2]) < 32767
1353 && INTVAL (operands
[2]) > -32768
1354 && REG_P (operands
[1])
1355 && push_operand (operands
[0], SImode
))
1356 return "pushab %c2(%1)";
1358 if (CONST_INT_P (operands
[2])
1359 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1360 return "subl3 $%n2,%1,%0";
1362 if (CONST_INT_P (operands
[2])
1363 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1364 && REG_P (operands
[1])
1365 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1366 || REGNO (operands
[1]) > 11))
1367 return "movab %c2(%1),%0";
1369 /* Add this if using gcc on a VAX 3xxx:
1370 if (REG_P (operands[1]) && REG_P (operands[2]))
1371 return "movab (%1)[%2],%0";
1374 if (REG_P (operands
[1]) && symbolic_operand (operands
[2], SImode
))
1376 if (push_operand (operands
[0], SImode
))
1377 return "pushab %a2[%1]";
1378 return "movab %a2[%1],%0";
1381 if (REG_P (operands
[2]) && symbolic_operand (operands
[1], SImode
))
1383 if (push_operand (operands
[0], SImode
))
1384 return "pushab %a1[%2]";
1385 return "movab %a1[%2],%0";
1388 if (flag_pic
&& REG_P (operands
[0])
1389 && symbolic_operand (operands
[2], SImode
))
1390 return "movab %a2,%0;addl2 %1,%0";
1393 && (symbolic_operand (operands
[1], SImode
)
1394 || symbolic_operand (operands
[1], SImode
)))
1397 return "addl3 %1,%2,%0";
/* HImode.  */
1400 if (rtx_equal_p (operands
[0], operands
[1]))
1402 if (operands
[2] == const1_rtx
)
1404 if (operands
[2] == constm1_rtx
)
1406 if (CONST_INT_P (operands
[2])
1407 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1408 return "subw2 $%n2,%0";
1409 return "addw2 %2,%0";
1411 if (rtx_equal_p (operands
[0], operands
[2]))
1412 return "addw2 %1,%0";
1413 if (CONST_INT_P (operands
[2])
1414 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1415 return "subw3 $%n2,%1,%0";
1416 return "addw3 %1,%2,%0";
/* QImode.  */
1419 if (rtx_equal_p (operands
[0], operands
[1]))
1421 if (operands
[2] == const1_rtx
)
1423 if (operands
[2] == constm1_rtx
)
1425 if (CONST_INT_P (operands
[2])
1426 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1427 return "subb2 $%n2,%0";
1428 return "addb2 %2,%0";
1430 if (rtx_equal_p (operands
[0], operands
[2]))
1431 return "addb2 %1,%0";
1432 if (CONST_INT_P (operands
[2])
1433 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1434 return "subb3 $%n2,%1,%0";
1435 return "addb3 %1,%2,%0";
/* Return the assembler template for an integer subtract of width
   MODE.  NOTE(review): garbled extraction -- the return type, the
   switch-on-mode skeleton, `low'/`carry' declarations and many lines
   (e.g. originals 1444-1449, 1451-1452, 1454-1455, 1457-1459, 1461,
   1471, 1476, 1479-1480, 1483, 1488, 1490-1492, 1497, 1499-1500,
   1502-1503, 1508, 1510-1516) are missing; confirm against the
   original.  */
1443 vax_output_int_subtract (rtx insn
, rtx
*operands
, enum machine_mode mode
)
1450 const char *pattern
;
/* DImode path (currently disabled by the `&& 0').  */
1453 if (TARGET_QMATH
&& 0)
1456 split_quadword_operands (insn
, MINUS
, operands
, low
, 3);
1460 if (operands
[1] == const0_rtx
&& low
[1] == const0_rtx
)
1462 /* Negation is tricky. It's basically complement and increment.
1463 Negate hi, then lo, and subtract the carry back. */
1464 if ((MEM_P (low
[0]) && GET_CODE (XEXP (low
[0], 0)) == POST_INC
)
1465 || (MEM_P (operands
[0])
1466 && GET_CODE (XEXP (operands
[0], 0)) == POST_INC
))
1467 fatal_insn ("illegal operand detected", insn
);
1468 output_asm_insn ("mnegl %2,%0", operands
);
1469 output_asm_insn ("mnegl %2,%0", low
);
1470 return "sbwc $0,%0";
1472 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1473 gcc_assert (rtx_equal_p (low
[0], low
[1]));
1474 if (low
[2] == const1_rtx
)
1475 output_asm_insn ("decl %0", low
);
1477 output_asm_insn ("subl2 %2,%0", low
);
1478 return "sbwc %2,%0";
1481 /* Subtract low parts. */
1482 if (rtx_equal_p (operands
[0], operands
[1]))
1484 if (low
[2] == const0_rtx
)
1485 pattern
= 0, carry
= 0;
1486 else if (low
[2] == constm1_rtx
)
1487 pattern
= "decl %0";
1489 pattern
= "subl2 %2,%0";
1493 if (low
[2] == constm1_rtx
)
1494 pattern
= "decl %0";
1495 else if (low
[2] == const0_rtx
)
1496 pattern
= get_insn_template (CODE_FOR_movsi
, insn
), carry
= 0;
1498 pattern
= "subl3 %2,%1,%0";
1501 output_asm_insn (pattern
, low
);
/* Subtract the high parts, propagating the borrow.  */
1504 if (!rtx_equal_p (operands
[0], operands
[1]))
1505 return "movl %1,%0;sbwc %2,%0";
1506 return "sbwc %2,%0";
1507 /* %0 = %2 - %1 - C */
/* SImode fallback: defer to the standard subsi3 template.  */
1509 return get_insn_template (CODE_FOR_subsi3
, insn
);
1517 /* Output a conditional branch. */
1519 vax_output_conditional_branch (enum rtx_code code
)
1523 case EQ
: return "jeql %l0";
1524 case NE
: return "jneq %l0";
1525 case GT
: return "jgtr %l0";
1526 case LT
: return "jlss %l0";
1527 case GTU
: return "jgtru %l0";
1528 case LTU
: return "jlssu %l0";
1529 case GE
: return "jgeq %l0";
1530 case LE
: return "jleq %l0";
1531 case GEU
: return "jgequ %l0";
1532 case LEU
: return "jlequ %l0";
1539 mkrtx(enum rtx_code code
, enum machine_mode mode
, rtx base
, HOST_WIDE_INT off
)
1543 if (GET_CODE (base
) == CONST
)
1544 base
= XEXP (base
, 0);
1546 if (GET_CODE (base
) == PLUS
)
1548 off
+= INTVAL (XEXP (base
, 1));
1549 base
= XEXP (base
, 0);
1551 if (code
== POST_INC
)
1552 tmp
= gen_rtx_POST_INC (SImode
, base
);
1553 else if (off
== 0 || (REG_P (base
) && code
== REG
))
1556 tmp
= plus_constant (base
, off
);
1557 return gen_rtx_MEM (mode
, tmp
);
1561 vax_output_movmemsi (rtx insn
, rtx
*operands
)
1563 HOST_WIDE_INT n
= INTVAL (operands
[2]);
1566 const char *pat
= NULL
;
1567 const enum rtx_code
*src_codes
;
1568 const enum rtx_code
*dest_codes
;
1572 static const enum machine_mode xmodes
[4] =
1574 QImode
, HImode
, SImode
, DImode
1576 static const char * const pats
[4] =
1578 "movb %1,%0", "movw %1,%0", "movl %1,%0", "movq %1,%0",
1580 static const enum rtx_code codes
[2][3] =
1582 { PLUS
, PLUS
, PLUS
},
1583 { POST_INC
, POST_INC
, REG
},
1586 src
= XEXP (operands
[1], 0);
1589 codes
[REG_P (src
) && find_regno_note (insn
, REG_DEAD
, REGNO(src
))];
1591 dest
= XEXP (operands
[0], 0);
1594 codes
[REG_P (dest
) && find_regno_note (insn
, REG_DEAD
, REGNO(dest
))];
1596 for (off
= 0, code_idx
= 0, mode_idx
= 3; mode_idx
>= 0; mode_idx
--)
1598 const enum machine_mode mode
= xmodes
[mode_idx
];
1599 const HOST_WIDE_INT mode_len
= GET_MODE_SIZE (mode
);
1600 for (; n
>= mode_len
; n
-= mode_len
, off
+= mode_len
)
1603 output_asm_insn (pat
, operands
);
1606 operands
[0] = mkrtx(dest_codes
[code_idx
], mode
, dest
, off
);
1607 operands
[1] = mkrtx(src_codes
[code_idx
], mode
, src
, off
);
1610 pat
= pats
[mode_idx
];
1617 /* 1 if X is an rtx for a constant that is a valid address. */
1620 legitimate_constant_address_p (rtx x
)
1622 if (GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == SYMBOL_REF
1623 || CONST_INT_P (x
) || GET_CODE (x
) == HIGH
)
1625 if (GET_CODE (x
) != CONST
)
1627 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1629 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
1630 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0)))
1636 /* Nonzero if the constant value X is a legitimate general operand.
1637 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1640 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED
)
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */

#define INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */

#define BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1657 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1659 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1660 are no SYMBOL_REFs for external symbols present. */
1663 indirectable_constant_address_p (rtx x
, bool indirect
)
1665 if (GET_CODE (x
) == SYMBOL_REF
)
1666 return !flag_pic
|| SYMBOL_REF_LOCAL_P (x
) || !indirect
;
1668 if (GET_CODE (x
) == CONST
)
1670 || GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
1671 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0));
1673 return CONSTANT_ADDRESS_P (x
);
1676 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1679 indirectable_constant_address_p (rtx x
, bool indirect ATTRIBUTE_UNUSED
)
1681 return CONSTANT_ADDRESS_P (x
);
1684 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1686 /* Nonzero if X is an address which can be indirected. External symbols
1687 could be in a sharable image library, so we disallow those. */
1690 indirectable_address_p(rtx x
, bool strict
, bool indirect
)
1692 if (indirectable_constant_address_p (x
, indirect
)
1693 || BASE_REGISTER_P (x
, strict
))
1695 if (GET_CODE (x
) != PLUS
1696 || !BASE_REGISTER_P (XEXP (x
, 0), strict
)
1697 || (flag_pic
&& !CONST_INT_P (XEXP (x
, 1))))
1699 return indirectable_constant_address_p (XEXP (x
, 1), indirect
);
1702 /* Return 1 if x is a valid address not using indexing.
1703 (This much is the easy part.) */
1705 nonindexed_address_p (rtx x
, bool strict
)
1710 extern rtx
*reg_equiv_mem
;
1711 if (! reload_in_progress
1712 || reg_equiv_mem
[REGNO (x
)] == 0
1713 || indirectable_address_p (reg_equiv_mem
[REGNO (x
)], strict
, false))
1716 if (indirectable_constant_address_p (x
, false))
1718 if (indirectable_address_p (x
, strict
, false))
1720 xfoo0
= XEXP (x
, 0);
1721 if (MEM_P (x
) && indirectable_address_p (xfoo0
, strict
, true))
1723 if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1724 && BASE_REGISTER_P (xfoo0
, strict
))
1729 /* 1 if PROD is either a reg times size of mode MODE and MODE is less
1730 than or equal 8 bytes, or just a reg if MODE is one byte. */
1733 index_term_p (rtx prod
, enum machine_mode mode
, bool strict
)
1737 if (GET_MODE_SIZE (mode
) == 1)
1738 return BASE_REGISTER_P (prod
, strict
);
1740 if (GET_CODE (prod
) != MULT
|| GET_MODE_SIZE (mode
) > 8)
1743 xfoo0
= XEXP (prod
, 0);
1744 xfoo1
= XEXP (prod
, 1);
1746 if (CONST_INT_P (xfoo0
)
1747 && INTVAL (xfoo0
) == (int)GET_MODE_SIZE (mode
)
1748 && INDEX_REGISTER_P (xfoo1
, strict
))
1751 if (CONST_INT_P (xfoo1
)
1752 && INTVAL (xfoo1
) == (int)GET_MODE_SIZE (mode
)
1753 && INDEX_REGISTER_P (xfoo0
, strict
))
1759 /* Return 1 if X is the sum of a register
1760 and a valid index term for mode MODE. */
1762 reg_plus_index_p (rtx x
, enum machine_mode mode
, bool strict
)
1766 if (GET_CODE (x
) != PLUS
)
1769 xfoo0
= XEXP (x
, 0);
1770 xfoo1
= XEXP (x
, 1);
1772 if (BASE_REGISTER_P (xfoo0
, strict
) && index_term_p (xfoo1
, mode
, strict
))
1775 if (BASE_REGISTER_P (xfoo1
, strict
) && index_term_p (xfoo0
, mode
, strict
))
1781 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1783 indexable_address_p (rtx xfoo0
, rtx xfoo1
, enum machine_mode mode
, bool strict
)
1785 if (!CONSTANT_ADDRESS_P (xfoo0
))
1787 if (BASE_REGISTER_P (xfoo1
, strict
))
1788 return !flag_pic
|| mode
== QImode
;
1789 if (flag_pic
&& symbolic_operand (xfoo0
, SImode
))
1791 return reg_plus_index_p (xfoo1
, mode
, strict
);
1794 /* legitimate_address_p returns 1 if it recognizes an RTL expression "x"
1795 that is a valid memory address for an instruction.
1796 The MODE argument is the machine mode for the MEM expression
1797 that wants to use this address. */
1799 legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1803 if (nonindexed_address_p (x
, strict
))
1806 if (GET_CODE (x
) != PLUS
)
1809 /* Handle <address>[index] represented with index-sum outermost */
1811 xfoo0
= XEXP (x
, 0);
1812 xfoo1
= XEXP (x
, 1);
1814 if (index_term_p (xfoo0
, mode
, strict
)
1815 && nonindexed_address_p (xfoo1
, strict
))
1818 if (index_term_p (xfoo1
, mode
, strict
)
1819 && nonindexed_address_p (xfoo0
, strict
))
1822 /* Handle offset(reg)[index] with offset added outermost */
1824 if (indexable_address_p (xfoo0
, xfoo1
, mode
, strict
)
1825 || indexable_address_p (xfoo1
, xfoo0
, mode
, strict
))
1831 /* Return 1 if x (a legitimate address expression) has an effect that
1832 depends on the machine mode it is used for. On the VAX, the predecrement
1833 and postincrement address depend thus (the amount of decrement or
1834 increment being the length of the operand) and all indexed address depend
1835 thus (because the index scale factor is the length of the operand). */
1838 vax_mode_dependent_address_p (rtx x
)
1842 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_DEC
)
1844 if (GET_CODE (x
) != PLUS
)
1847 xfoo0
= XEXP (x
, 0);
1848 xfoo1
= XEXP (x
, 1);
1850 if (CONST_INT_P (xfoo0
) && REG_P (xfoo1
))
1852 if (CONST_INT_P (xfoo1
) && REG_P (xfoo0
))
1854 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo0
) && REG_P (xfoo1
))
1856 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo1
) && REG_P (xfoo0
))
1863 fixup_mathdi_operand (rtx x
, enum machine_mode mode
)
1865 if (illegal_addsub_di_memory_operand (x
, mode
))
1867 rtx addr
= XEXP (x
, 0);
1868 rtx temp
= gen_reg_rtx (Pmode
);
1870 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1871 if (GET_CODE (addr
) == CONST
&& flag_pic
)
1873 offset
= XEXP (XEXP (addr
, 0), 1);
1874 addr
= XEXP (XEXP (addr
, 0), 0);
1877 emit_move_insn (temp
, addr
);
1879 temp
= gen_rtx_PLUS (Pmode
, temp
, offset
);
1880 x
= gen_rtx_MEM (DImode
, temp
);
1886 vax_expand_addsub_di_operands (rtx
* operands
, enum rtx_code code
)
1888 int hi_only
= operand_subword (operands
[2], 0, 0, DImode
) == const0_rtx
;
1891 rtx (*gen_old_insn
)(rtx
, rtx
, rtx
);
1892 rtx (*gen_si_insn
)(rtx
, rtx
, rtx
);
1893 rtx (*gen_insn
)(rtx
, rtx
, rtx
);
1897 gen_old_insn
= gen_adddi3_old
;
1898 gen_si_insn
= gen_addsi3
;
1899 gen_insn
= gen_adcdi3
;
1901 else if (code
== MINUS
)
1903 gen_old_insn
= gen_subdi3_old
;
1904 gen_si_insn
= gen_subsi3
;
1905 gen_insn
= gen_sbcdi3
;
1910 /* If this is addition (thus operands are commutative) and if there is one
1911 addend that duplicates the desination, we want that addend to be the
1914 && rtx_equal_p (operands
[0], operands
[2])
1915 && !rtx_equal_p (operands
[1], operands
[2]))
1918 operands
[2] = operands
[1];
1924 emit_insn ((*gen_old_insn
) (operands
[0], operands
[1], operands
[2]));
1928 if (!rtx_equal_p (operands
[0], operands
[1])
1929 && (REG_P (operands
[0]) && MEM_P (operands
[1])))
1931 emit_move_insn (operands
[0], operands
[1]);
1932 operands
[1] = operands
[0];
1935 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1936 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1937 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1939 if (!rtx_equal_p (operands
[0], operands
[1]))
1940 emit_move_insn (operand_subword (operands
[0], 0, 0, DImode
),
1941 operand_subword (operands
[1], 0, 0, DImode
));
1943 emit_insn ((*gen_si_insn
) (operand_subword (operands
[0], 1, 0, DImode
),
1944 operand_subword (operands
[1], 1, 0, DImode
),
1945 operand_subword (operands
[2], 1, 0, DImode
)));
1949 /* If are adding the same value together, that's really a multiply by 2,
1950 and that's just a left shift of 1. */
1951 if (rtx_equal_p (operands
[1], operands
[2]))
1953 gcc_assert (code
!= MINUS
);
1954 emit_insn (gen_ashldi3 (operands
[0], operands
[1], const1_rtx
));
1958 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1960 /* If an operand is the same as operand[0], use the operand[0] rtx
1961 because fixup will an equivalent rtx but not an equal one. */
1963 if (rtx_equal_p (operands
[0], operands
[1]))
1964 operands
[1] = operands
[0];
1966 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1968 if (rtx_equal_p (operands
[0], operands
[2]))
1969 operands
[2] = operands
[0];
1971 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1973 /* If we are subtracting not from ourselves [d = a - b], and because the
1974 carry ops are two operand only, we would need to do a move prior to
1975 the subtract. And if d == b, we would need a temp otherwise
1976 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1977 into d = -b, d += a. Since -b can never overflow, even if b == d,
1980 If we are doing addition, since the carry ops are two operand, if
1981 we aren't adding to ourselves, move the first addend to the
1982 destination first. */
1984 gcc_assert (operands
[1] != const0_rtx
|| code
== MINUS
);
1985 if (!rtx_equal_p (operands
[0], operands
[1]) && operands
[1] != const0_rtx
)
1987 if (code
== MINUS
&& CONSTANT_P (operands
[1]))
1989 temp
= gen_reg_rtx (DImode
);
1990 emit_insn (gen_sbcdi3 (operands
[0], const0_rtx
, operands
[2]));
1992 gen_insn
= gen_adcdi3
;
1993 operands
[2] = operands
[1];
1994 operands
[1] = operands
[0];
1997 emit_move_insn (operands
[0], operands
[1]);
2000 /* Subtracting a constant will have been rewritten to an addition of the
2001 negative of that constant before we get here. */
2002 gcc_assert (!CONSTANT_P (operands
[2]) || code
== PLUS
);
2003 emit_insn ((*gen_insn
) (operands
[0], operands
[1], operands
[2]));