1 /* Target definitions for the MorphoRISC1
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
36 #include "integrate.h"
45 #include "insn-flags.h"
49 #include "target-def.h"
/* Bit mask selecting the frame pointer register in a register mask.  */
#define FP_MASK   (1 << (GPR_FP))

/* Bit mask selecting the link register in a register mask.  */
#define LINK_MASK (1 << (GPR_LINK))

/* Given a SIZE in bytes, round up to the number of words needed to
   hold it.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
60 /* A C structure for machine-specific, per-function data.
61 This is added to the cfun structure. */
62 struct machine_function
GTY(())
64 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
65 int ra_needs_full_frame
;
66 struct rtx_def
* eh_stack_adjust
;
67 int interrupt_handler
;
70 /* Define the information needed to generate branch and scc insns.
71 This is stored from the compare operation. */
72 struct rtx_def
* mt_compare_op0
;
73 struct rtx_def
* mt_compare_op1
;
75 /* Current frame information calculated by compute_frame_size. */
76 struct mt_frame_info current_frame_info
;
78 /* Zero structure to initialize current_frame_info. */
79 struct mt_frame_info zero_frame_info
;
81 /* mt doesn't have unsigned compares need a library call for this. */
82 struct rtx_def
* mt_ucmpsi3_libcall
;
84 static int mt_flag_delayed_branch
;
88 mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED
,
89 int incoming ATTRIBUTE_UNUSED
)
91 return gen_rtx_REG (Pmode
, RETVAL_REGNUM
);
94 /* Implement RETURN_ADDR_RTX. */
96 mt_return_addr_rtx (int count
)
101 return get_hard_reg_initial_val (Pmode
, GPR_LINK
);
/* The following variable value indicates the number of nops required
   between the current instruction and the next instruction to avoid
   any pipeline hazards.  The companion string records why, for the
   benefit of the assembly-output comment.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";
110 /* Implement ASM_OUTPUT_OPCODE. */
112 mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED
, const char *ptr
)
114 if (mt_nops_required
)
115 fprintf (f
, ";# need %d nops because of %s\n\t",
116 mt_nops_required
, mt_nop_reasons
);
118 while (mt_nops_required
)
120 fprintf (f
, "nop\n\t");
127 /* Given an insn, return whether it's a memory operation or a branch
128 operation, otherwise return TYPE_ARITH. */
129 static enum attr_type
130 mt_get_attr_type (rtx complete_insn
)
132 rtx insn
= PATTERN (complete_insn
);
134 if (JUMP_P (complete_insn
))
136 if (CALL_P (complete_insn
))
139 if (GET_CODE (insn
) != SET
)
142 if (SET_DEST (insn
) == pc_rtx
)
145 if (GET_CODE (SET_DEST (insn
)) == MEM
)
148 if (GET_CODE (SET_SRC (insn
)) == MEM
)
154 /* A helper routine for insn_dependent_p called through note_stores. */
157 insn_dependent_p_1 (rtx x
, rtx pat ATTRIBUTE_UNUSED
, void *data
)
159 rtx
* pinsn
= (rtx
*) data
;
161 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
165 /* Return true if anything in insn X is (anti,output,true)
166 dependent on anything in insn Y. */
169 insn_dependent_p (rtx x
, rtx y
)
173 if (! INSN_P (x
) || ! INSN_P (y
))
177 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
182 note_stores (PATTERN (y
), insn_dependent_p_1
, &tmp
);
183 return (tmp
== NULL_RTX
);
187 /* Return true if anything in insn X is true dependent on anything in
190 insn_true_dependent_p (rtx x
, rtx y
)
194 if (! INSN_P (x
) || ! INSN_P (y
))
198 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
199 return (tmp
== NULL_RTX
);
202 /* The following determines the number of nops that need to be
203 inserted between the previous instructions and current instruction
204 to avoid pipeline hazards on the mt processor. Remember that
205 the function is not called for asm insns. */
208 mt_final_prescan_insn (rtx insn
,
209 rtx
* opvec ATTRIBUTE_UNUSED
,
210 int noperands ATTRIBUTE_UNUSED
)
213 enum attr_type prev_attr
;
215 mt_nops_required
= 0;
218 /* ms2 constraints are dealt with in reorg. */
222 /* Only worry about real instructions. */
226 /* Find the previous real instructions. */
227 for (prev_i
= PREV_INSN (insn
);
229 && (! INSN_P (prev_i
)
230 || GET_CODE (PATTERN (prev_i
)) == USE
231 || GET_CODE (PATTERN (prev_i
)) == CLOBBER
);
232 prev_i
= PREV_INSN (prev_i
))
234 /* If we meet a barrier, there is no flow through here. */
235 if (BARRIER_P (prev_i
))
239 /* If there isn't one then there is nothing that we need do. */
240 if (prev_i
== NULL
|| ! INSN_P (prev_i
))
243 prev_attr
= mt_get_attr_type (prev_i
);
245 /* Delayed branch slots already taken care of by delay branch scheduling. */
246 if (prev_attr
== TYPE_BRANCH
)
249 switch (mt_get_attr_type (insn
))
253 /* Avoid consecutive memory operation. */
254 if ((prev_attr
== TYPE_LOAD
|| prev_attr
== TYPE_STORE
)
255 && TARGET_MS1_64_001
)
257 mt_nops_required
= 1;
258 mt_nop_reasons
= "consecutive mem ops";
264 /* One cycle of delay is required between load
265 and the dependent arithmetic instruction. */
266 if (prev_attr
== TYPE_LOAD
267 && insn_true_dependent_p (prev_i
, insn
))
269 mt_nops_required
= 1;
270 mt_nop_reasons
= "load->arith dependency delay";
275 if (insn_dependent_p (prev_i
, insn
))
277 if (prev_attr
== TYPE_ARITH
&& TARGET_MS1_64_001
)
279 /* One cycle of delay between arith
280 instructions and branch dependent on arith. */
281 mt_nops_required
= 1;
282 mt_nop_reasons
= "arith->branch dependency delay";
284 else if (prev_attr
== TYPE_LOAD
)
286 /* Two cycles of delay are required
287 between load and dependent branch. */
288 if (TARGET_MS1_64_001
)
289 mt_nops_required
= 2;
291 mt_nops_required
= 1;
292 mt_nop_reasons
= "load->branch dependency delay";
298 fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn
);
303 /* Print debugging information for a frame. */
305 mt_debug_stack (struct mt_frame_info
* info
)
311 error ("info pointer NULL");
315 fprintf (stderr
, "\nStack information for function %s:\n",
316 ((current_function_decl
&& DECL_NAME (current_function_decl
))
317 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
320 fprintf (stderr
, "\ttotal_size = %d\n", info
->total_size
);
321 fprintf (stderr
, "\tpretend_size = %d\n", info
->pretend_size
);
322 fprintf (stderr
, "\targs_size = %d\n", info
->args_size
);
323 fprintf (stderr
, "\textra_size = %d\n", info
->extra_size
);
324 fprintf (stderr
, "\treg_size = %d\n", info
->reg_size
);
325 fprintf (stderr
, "\tvar_size = %d\n", info
->var_size
);
326 fprintf (stderr
, "\tframe_size = %d\n", info
->frame_size
);
327 fprintf (stderr
, "\treg_mask = 0x%x\n", info
->reg_mask
);
328 fprintf (stderr
, "\tsave_fp = %d\n", info
->save_fp
);
329 fprintf (stderr
, "\tsave_lr = %d\n", info
->save_lr
);
330 fprintf (stderr
, "\tinitialized = %d\n", info
->initialized
);
331 fprintf (stderr
, "\tsaved registers =");
333 /* Print out reg_mask in a more readable format. */
334 for (regno
= GPR_R0
; regno
<= GPR_LAST
; regno
++)
335 if ( (1 << regno
) & info
->reg_mask
)
336 fprintf (stderr
, " %s", reg_names
[regno
]);
342 /* Print a memory address as an operand to reference that memory location. */
345 mt_print_operand_simple_address (FILE * file
, rtx addr
)
348 error ("PRINT_OPERAND_ADDRESS, null pointer");
351 switch (GET_CODE (addr
))
354 fprintf (file
, "%s, #0", reg_names
[REGNO (addr
)]);
361 rtx arg0
= XEXP (addr
, 0);
362 rtx arg1
= XEXP (addr
, 1);
364 if (GET_CODE (arg0
) == REG
)
368 if (GET_CODE (offset
) == REG
)
369 fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr
);
372 else if (GET_CODE (arg1
) == REG
)
373 reg
= arg1
, offset
= arg0
;
374 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
376 fprintf (file
, "%s, #", reg_names
[GPR_R0
]);
377 output_addr_const (file
, addr
);
380 fprintf (file
, "%s, #", reg_names
[REGNO (reg
)]);
381 output_addr_const (file
, offset
);
389 output_addr_const (file
, addr
);
393 fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr
);
398 /* Implement PRINT_OPERAND_ADDRESS. */
400 mt_print_operand_address (FILE * file
, rtx addr
)
402 if (GET_CODE (addr
) == AND
403 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
404 && INTVAL (XEXP (addr
, 1)) == -3)
405 mt_print_operand_simple_address (file
, XEXP (addr
, 0));
407 mt_print_operand_simple_address (file
, addr
);
410 /* Implement PRINT_OPERAND. */
412 mt_print_operand (FILE * file
, rtx x
, int code
)
417 /* Output a nop if there's nothing for the delay slot. */
418 if (dbr_sequence_length () == 0)
419 fputs ("\n\tnop", file
);
423 fprintf(file
, "#%%hi16(");
424 output_addr_const (file
, x
);
429 fprintf(file
, "#%%lo16(");
430 output_addr_const (file
, x
);
435 fprintf(file
, "#%ld", ~INTVAL (x
));
439 if (GET_CODE (x
) == CONST_INT
&& INTVAL (x
) == 0)
441 fputs (reg_names
[GPR_R0
], file
);
450 /* output_operand_lossage ("mt_print_operand: unknown code"); */
451 fprintf (file
, "unknown code");
455 switch (GET_CODE (x
))
458 fputs (reg_names
[REGNO (x
)], file
);
463 fprintf(file
, "#%ld", INTVAL (x
));
467 mt_print_operand_address(file
, XEXP (x
,0));
472 output_addr_const (file
, x
);
476 fprintf(file
, "Uknown code: %d", GET_CODE (x
));
483 /* Implement INIT_CUMULATIVE_ARGS. */
485 mt_init_cumulative_args (CUMULATIVE_ARGS
* cum
, tree fntype
, rtx libname
,
486 tree fndecl ATTRIBUTE_UNUSED
, int incoming
)
490 if (TARGET_DEBUG_ARG
)
492 fprintf (stderr
, "\nmt_init_cumulative_args:");
495 fputs (" incoming", stderr
);
499 tree ret_type
= TREE_TYPE (fntype
);
500 fprintf (stderr
, " return = %s,",
501 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
504 if (libname
&& GET_CODE (libname
) == SYMBOL_REF
)
505 fprintf (stderr
, " libname = %s", XSTR (libname
, 0));
507 if (cfun
->returns_struct
)
508 fprintf (stderr
, " return-struct");
514 /* Compute the slot number to pass an argument in.
515 Returns the slot number or -1 if passing on the stack.
517 CUM is a variable of type CUMULATIVE_ARGS which gives info about
518 the preceding args and about the function being called.
519 MODE is the argument's machine mode.
520 TYPE is the data type of the argument (as a tree).
521 This is null for libcalls where that information may
523 NAMED is nonzero if this argument is a named parameter
524 (otherwise it is an extra parameter matching an ellipsis).
525 INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
526 *PREGNO records the register number to use if scalar type. */
529 mt_function_arg_slotno (const CUMULATIVE_ARGS
* cum
,
530 enum machine_mode mode
,
532 int named ATTRIBUTE_UNUSED
,
533 int incoming_p ATTRIBUTE_UNUSED
,
536 int regbase
= FIRST_ARG_REGNUM
;
539 if (mode
== VOIDmode
|| targetm
.calls
.must_pass_in_stack (mode
, type
))
542 if (slotno
>= MT_NUM_ARG_REGS
)
545 * pregno
= regbase
+ slotno
;
550 /* Implement FUNCTION_ARG. */
552 mt_function_arg (const CUMULATIVE_ARGS
* cum
,
553 enum machine_mode mode
,
561 slotno
= mt_function_arg_slotno (cum
, mode
, type
, named
, incoming_p
, ®no
);
566 reg
= gen_rtx_REG (mode
, regno
);
571 /* Implement FUNCTION_ARG_ADVANCE. */
573 mt_function_arg_advance (CUMULATIVE_ARGS
* cum
,
574 enum machine_mode mode
,
575 tree type ATTRIBUTE_UNUSED
,
580 /* We pass 0 for incoming_p here, it doesn't matter. */
581 slotno
= mt_function_arg_slotno (cum
, mode
, type
, named
, 0, ®no
);
583 * cum
+= (mode
!= BLKmode
584 ? ROUND_ADVANCE (GET_MODE_SIZE (mode
))
585 : ROUND_ADVANCE (int_size_in_bytes (type
)));
587 if (TARGET_DEBUG_ARG
)
589 "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
590 *cum
, GET_MODE_NAME (mode
), named
,
591 (*cum
) * UNITS_PER_WORD
);
594 /* Implement hook TARGET_ARG_PARTIAL_BYTES.
596 Returns the number of bytes at the beginning of an argument that
597 must be put in registers. The value must be zero for arguments
598 that are passed entirely in registers or that are entirely pushed
601 mt_arg_partial_bytes (CUMULATIVE_ARGS
* pcum
,
602 enum machine_mode mode
,
604 bool named ATTRIBUTE_UNUSED
)
610 words
= ((int_size_in_bytes (type
) + UNITS_PER_WORD
- 1)
613 words
= (GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
615 if (! targetm
.calls
.pass_by_reference (&cum
, mode
, type
, named
)
616 && cum
< MT_NUM_ARG_REGS
617 && (cum
+ words
) > MT_NUM_ARG_REGS
)
619 int bytes
= (MT_NUM_ARG_REGS
- cum
) * UNITS_PER_WORD
;
622 fprintf (stderr
, "function_arg_partial_nregs = %d\n", bytes
);
630 /* Implement TARGET_PASS_BY_REFERENCE hook. */
632 mt_pass_by_reference (CUMULATIVE_ARGS
* cum ATTRIBUTE_UNUSED
,
633 enum machine_mode mode ATTRIBUTE_UNUSED
,
635 bool named ATTRIBUTE_UNUSED
)
637 return (type
&& int_size_in_bytes (type
) > 4 * UNITS_PER_WORD
);
640 /* Implement FUNCTION_ARG_BOUNDARY. */
642 mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED
,
643 tree type ATTRIBUTE_UNUSED
)
645 return BITS_PER_WORD
;
648 /* Implement REG_OK_FOR_BASE_P. */
650 mt_reg_ok_for_base_p (rtx x
, int strict
)
653 return (((unsigned) REGNO (x
)) < FIRST_PSEUDO_REGISTER
);
657 /* Helper function of mt_legitimate_address_p. Return true if XINSN
658 is a simple address, otherwise false. */
660 mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
661 rtx xinsn
, int strict
)
665 fprintf (stderr
, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
666 strict
? "" : "not ");
670 if (GET_CODE (xinsn
) == REG
&& mt_reg_ok_for_base_p (xinsn
, strict
))
673 if (GET_CODE (xinsn
) == PLUS
674 && GET_CODE (XEXP (xinsn
, 0)) == REG
675 && mt_reg_ok_for_base_p (XEXP (xinsn
, 0), strict
)
676 && GET_CODE (XEXP (xinsn
, 1)) == CONST_INT
677 && SMALL_INT (XEXP (xinsn
, 1)))
684 /* Helper function of GO_IF_LEGITIMATE_ADDRESS. Return non-zero if
685 XINSN is a legitimate address on MT. */
687 mt_legitimate_address_p (enum machine_mode mode
, rtx xinsn
, int strict
)
689 if (mt_legitimate_simple_address_p (mode
, xinsn
, strict
))
693 && GET_CODE (xinsn
) == AND
694 && GET_CODE (XEXP (xinsn
, 1)) == CONST_INT
695 && INTVAL (XEXP (xinsn
, 1)) == -3)
696 return mt_legitimate_simple_address_p (mode
, XEXP (xinsn
, 0), strict
);
701 /* Return truth value of whether OP can be used as an operands where a
702 register or 16 bit unsigned integer is needed. */
705 uns_arith_operand (rtx op
, enum machine_mode mode
)
707 if (GET_CODE (op
) == CONST_INT
&& SMALL_INT_UNSIGNED (op
))
710 return register_operand (op
, mode
);
713 /* Return truth value of whether OP can be used as an operands where a
714 16 bit integer is needed. */
717 arith_operand (rtx op
, enum machine_mode mode
)
719 if (GET_CODE (op
) == CONST_INT
&& SMALL_INT (op
))
722 return register_operand (op
, mode
);
725 /* Return truth value of whether OP is a register or the constant 0. */
728 reg_or_0_operand (rtx op
, enum machine_mode mode
)
730 switch (GET_CODE (op
))
733 return INTVAL (op
) == 0;
737 return register_operand (op
, mode
);
746 /* Return truth value of whether OP is a constant that requires two
747 loads to put in a register. */
750 big_const_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
752 if (GET_CODE (op
) == CONST_INT
&& CONST_OK_FOR_LETTER_P (INTVAL (op
), 'M'))
758 /* Return truth value of whether OP is a constant that require only
759 one load to put in a register. */
762 single_const_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
764 if (big_const_operand (op
, mode
)
765 || GET_CODE (op
) == CONST
766 || GET_CODE (op
) == LABEL_REF
767 || GET_CODE (op
) == SYMBOL_REF
)
773 /* True if the current function is an interrupt handler
774 (either via #pragma or an attribute specification). */
775 int interrupt_handler
;
776 enum processor_type mt_cpu
;
778 static struct machine_function
*
779 mt_init_machine_status (void)
781 struct machine_function
*f
;
783 f
= ggc_alloc_cleared (sizeof (struct machine_function
));
788 /* Implement OVERRIDE_OPTIONS. */
790 mt_override_options (void)
792 if (mt_cpu_string
!= NULL
)
794 if (!strcmp (mt_cpu_string
, "ms1-64-001"))
795 mt_cpu
= PROCESSOR_MS1_64_001
;
796 else if (!strcmp (mt_cpu_string
, "ms1-16-002"))
797 mt_cpu
= PROCESSOR_MS1_16_002
;
798 else if (!strcmp (mt_cpu_string
, "ms1-16-003"))
799 mt_cpu
= PROCESSOR_MS1_16_003
;
800 else if (!strcmp (mt_cpu_string
, "ms2"))
801 mt_cpu
= PROCESSOR_MS2
;
803 error ("bad value (%s) for -march= switch", mt_cpu_string
);
806 mt_cpu
= PROCESSOR_MS1_64_001
;
810 flag_omit_frame_pointer
= 0;
814 /* We do delayed branch filling in machine dependent reorg */
815 mt_flag_delayed_branch
= flag_delayed_branch
;
816 flag_delayed_branch
= 0;
818 init_machine_status
= mt_init_machine_status
;
821 /* Do what is necessary for `va_start'. We look at the current function
822 to determine if stdarg or varargs is used and return the address of the
823 first unnamed parameter. */
826 mt_setup_incoming_varargs (CUMULATIVE_ARGS
*cum
,
827 enum machine_mode mode ATTRIBUTE_UNUSED
,
828 tree type ATTRIBUTE_UNUSED
,
829 int *pretend_size
, int no_rtl
)
832 int regs
= MT_NUM_ARG_REGS
- *cum
;
834 *pretend_size
= regs
< 0 ? 0 : GET_MODE_SIZE (SImode
) * regs
;
839 for (regno
= *cum
; regno
< MT_NUM_ARG_REGS
; regno
++)
841 rtx reg
= gen_rtx_REG (SImode
, FIRST_ARG_REGNUM
+ regno
);
842 rtx slot
= gen_rtx_PLUS (Pmode
,
843 gen_rtx_REG (SImode
, ARG_POINTER_REGNUM
),
844 GEN_INT (UNITS_PER_WORD
* regno
));
846 emit_move_insn (gen_rtx_MEM (SImode
, slot
), reg
);
850 /* Returns the number of bytes offset between the frame pointer and the stack
851 pointer for the current function. SIZE is the number of bytes of space
852 needed for local variables. */
855 mt_compute_frame_size (int size
)
858 unsigned int total_size
;
859 unsigned int var_size
;
860 unsigned int args_size
;
861 unsigned int pretend_size
;
862 unsigned int extra_size
;
863 unsigned int reg_size
;
864 unsigned int frame_size
;
865 unsigned int reg_mask
;
868 args_size
= current_function_outgoing_args_size
;
869 pretend_size
= current_function_pretend_args_size
;
870 extra_size
= FIRST_PARM_OFFSET (0);
871 total_size
= extra_size
+ pretend_size
+ args_size
+ var_size
;
875 /* Calculate space needed for registers. */
876 for (regno
= GPR_R0
; regno
<= GPR_LAST
; regno
++)
878 if (MUST_SAVE_REGISTER (regno
))
880 reg_size
+= UNITS_PER_WORD
;
881 reg_mask
|= 1 << regno
;
885 current_frame_info
.save_fp
= (regs_ever_live
[GPR_FP
]
886 || frame_pointer_needed
887 || interrupt_handler
);
888 current_frame_info
.save_lr
= (regs_ever_live
[GPR_LINK
]
890 || interrupt_handler
);
892 reg_size
+= (current_frame_info
.save_fp
+ current_frame_info
.save_lr
)
894 total_size
+= reg_size
;
895 total_size
= ((total_size
+ 3) & ~3);
897 frame_size
= total_size
;
899 /* Save computed information. */
900 current_frame_info
.pretend_size
= pretend_size
;
901 current_frame_info
.var_size
= var_size
;
902 current_frame_info
.args_size
= args_size
;
903 current_frame_info
.reg_size
= reg_size
;
904 current_frame_info
.frame_size
= args_size
+ var_size
;
905 current_frame_info
.total_size
= total_size
;
906 current_frame_info
.extra_size
= extra_size
;
907 current_frame_info
.reg_mask
= reg_mask
;
908 current_frame_info
.initialized
= reload_completed
;
913 /* Emit code to save REG in stack offset pointed to by MEM.
914 STACK_OFFSET is the offset from the SP where the save will happen.
915 This function sets the REG_FRAME_RELATED_EXPR note accordingly. */
917 mt_emit_save_restore (enum save_direction direction
,
918 rtx reg
, rtx mem
, int stack_offset
)
920 if (direction
== FROM_PROCESSOR_TO_MEM
)
924 insn
= emit_move_insn (mem
, reg
);
925 RTX_FRAME_RELATED_P (insn
) = 1;
928 (REG_FRAME_RELATED_EXPR
,
929 gen_rtx_SET (VOIDmode
,
931 gen_rtx_PLUS (SImode
,
933 GEN_INT (stack_offset
))),
938 emit_move_insn (reg
, mem
);
942 /* Emit code to save the frame pointer in the prologue and restore
943 frame pointer in epilogue. */
946 mt_emit_save_fp (enum save_direction direction
,
947 struct mt_frame_info info
)
950 int reg_mask
= info
.reg_mask
& ~(FP_MASK
| LINK_MASK
);
951 int offset
= info
.total_size
;
952 int stack_offset
= info
.total_size
;
954 /* If there is nothing to save, get out now. */
955 if (! info
.save_fp
&& ! info
.save_lr
&& ! reg_mask
)
958 /* If offset doesn't fit in a 15-bit signed integer,
959 uses a scratch registers to get a smaller offset. */
960 if (CONST_OK_FOR_LETTER_P(offset
, 'O'))
961 base_reg
= stack_pointer_rtx
;
964 /* Use the scratch register R9 that holds old stack pointer. */
965 base_reg
= gen_rtx_REG (SImode
, GPR_R9
);
971 offset
-= UNITS_PER_WORD
;
972 stack_offset
-= UNITS_PER_WORD
;
974 (direction
, gen_rtx_REG (SImode
, GPR_FP
),
976 gen_rtx_PLUS (SImode
, base_reg
, GEN_INT (offset
))),
981 /* Emit code to save registers in the prologue and restore register
985 mt_emit_save_regs (enum save_direction direction
,
986 struct mt_frame_info info
)
990 int reg_mask
= info
.reg_mask
& ~(FP_MASK
| LINK_MASK
);
991 int offset
= info
.total_size
;
992 int stack_offset
= info
.total_size
;
994 /* If there is nothing to save, get out now. */
995 if (! info
.save_fp
&& ! info
.save_lr
&& ! reg_mask
)
998 /* If offset doesn't fit in a 15-bit signed integer,
999 uses a scratch registers to get a smaller offset. */
1000 if (CONST_OK_FOR_LETTER_P(offset
, 'O'))
1001 base_reg
= stack_pointer_rtx
;
1004 /* Use the scratch register R9 that holds old stack pointer. */
1005 base_reg
= gen_rtx_REG (SImode
, GPR_R9
);
1011 /* This just records the space for it, the actual move generated in
1012 mt_emit_save_fp (). */
1013 offset
-= UNITS_PER_WORD
;
1014 stack_offset
-= UNITS_PER_WORD
;
1019 offset
-= UNITS_PER_WORD
;
1020 stack_offset
-= UNITS_PER_WORD
;
1021 mt_emit_save_restore
1022 (direction
, gen_rtx_REG (SImode
, GPR_LINK
),
1023 gen_rtx_MEM (SImode
,
1024 gen_rtx_PLUS (SImode
, base_reg
, GEN_INT (offset
))),
1028 /* Save any needed call-saved regs. */
1029 for (regno
= GPR_R0
; regno
<= GPR_LAST
; regno
++)
1031 if ((reg_mask
& (1 << regno
)) != 0)
1033 offset
-= UNITS_PER_WORD
;
1034 stack_offset
-= UNITS_PER_WORD
;
1035 mt_emit_save_restore
1036 (direction
, gen_rtx_REG (SImode
, regno
),
1037 gen_rtx_MEM (SImode
,
1038 gen_rtx_PLUS (SImode
, base_reg
, GEN_INT (offset
))),
1044 /* Return true if FUNC is a function with the 'interrupt' attribute. */
1046 mt_interrupt_function_p (tree func
)
1050 if (TREE_CODE (func
) != FUNCTION_DECL
)
1053 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
1054 return a
!= NULL_TREE
;
1057 /* Generate prologue code. */
1059 mt_expand_prologue (void)
1062 unsigned int frame_size
;
1064 if (mt_interrupt_function_p (current_function_decl
))
1066 interrupt_handler
= 1;
1068 cfun
->machine
->interrupt_handler
= 1;
1071 mt_compute_frame_size (get_frame_size ());
1073 if (TARGET_DEBUG_STACK
)
1074 mt_debug_stack (¤t_frame_info
);
1076 /* Compute size of stack adjustment. */
1077 frame_size
= current_frame_info
.total_size
;
1079 /* If offset doesn't fit in a 15-bit signed integer,
1080 uses a scratch registers to get a smaller offset. */
1081 if (CONST_OK_FOR_LETTER_P(frame_size
, 'O'))
1082 size_rtx
= GEN_INT (frame_size
);
1085 /* We do not have any scratch registers. */
1086 gcc_assert (!interrupt_handler
);
1088 size_rtx
= gen_rtx_REG (SImode
, GPR_R9
);
1089 insn
= emit_move_insn (size_rtx
, GEN_INT (frame_size
& 0xffff0000));
1090 insn
= emit_insn (gen_iorsi3 (size_rtx
, size_rtx
,
1091 GEN_INT (frame_size
& 0x0000ffff)));
1094 /* Allocate stack for this frame. */
1095 /* Make stack adjustment and use scratch register if constant too
1096 large to fit as immediate. */
1099 insn
= emit_insn (gen_subsi3 (stack_pointer_rtx
,
1102 RTX_FRAME_RELATED_P (insn
) = 1;
1104 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
1105 gen_rtx_SET (VOIDmode
,
1107 gen_rtx_MINUS (SImode
,
1109 GEN_INT (frame_size
))),
1113 /* Set R9 to point to old sp if required for access to register save
1115 if ( current_frame_info
.reg_size
!= 0
1116 && !CONST_OK_FOR_LETTER_P (frame_size
, 'O'))
1117 emit_insn (gen_addsi3 (size_rtx
, size_rtx
, stack_pointer_rtx
));
1119 /* Save the frame pointer. */
1120 mt_emit_save_fp (FROM_PROCESSOR_TO_MEM
, current_frame_info
);
1122 /* Now put the frame pointer into the frame pointer register. */
1123 if (frame_pointer_needed
)
1125 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1126 RTX_FRAME_RELATED_P (insn
) = 1;
1129 /* Save the registers. */
1130 mt_emit_save_regs (FROM_PROCESSOR_TO_MEM
, current_frame_info
);
1132 /* If we are profiling, make sure no instructions are scheduled before
1133 the call to mcount. */
1135 emit_insn (gen_blockage ());
1138 /* Implement EPILOGUE_USES. */
1140 mt_epilogue_uses (int regno
)
1142 if (cfun
->machine
&& cfun
->machine
->interrupt_handler
&& reload_completed
)
1144 return regno
== GPR_LINK
;
1147 /* Generate epilogue. EH_MODE is NORMAL_EPILOGUE when generating a
1148 function epilogue, or EH_EPILOGUE when generating an EH
1151 mt_expand_epilogue (enum epilogue_type eh_mode
)
1154 unsigned frame_size
;
1156 mt_compute_frame_size (get_frame_size ());
1158 if (TARGET_DEBUG_STACK
)
1159 mt_debug_stack (& current_frame_info
);
1161 /* Compute size of stack adjustment. */
1162 frame_size
= current_frame_info
.total_size
;
1164 /* If offset doesn't fit in a 15-bit signed integer,
1165 uses a scratch registers to get a smaller offset. */
1166 if (CONST_OK_FOR_LETTER_P(frame_size
, 'O'))
1167 size_rtx
= GEN_INT (frame_size
);
1170 /* We do not have any scratch registers. */
1171 gcc_assert (!interrupt_handler
);
1173 size_rtx
= gen_rtx_REG (SImode
, GPR_R9
);
1174 insn
= emit_move_insn (size_rtx
, GEN_INT (frame_size
& 0xffff0000));
1175 insn
= emit_insn (gen_iorsi3 (size_rtx
, size_rtx
,
1176 GEN_INT (frame_size
& 0x0000ffff)));
1177 /* Set R9 to point to old sp if required for access to register
1179 emit_insn (gen_addsi3 (size_rtx
, size_rtx
, stack_pointer_rtx
));
1182 /* Restore sp if there was some possible change to it. */
1183 if (frame_pointer_needed
)
1184 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1186 /* Restore the registers. */
1187 mt_emit_save_fp (FROM_MEM_TO_PROCESSOR
, current_frame_info
);
1188 mt_emit_save_regs (FROM_MEM_TO_PROCESSOR
, current_frame_info
);
1190 /* Make stack adjustment and use scratch register if constant too
1191 large to fit as immediate. */
1194 if (CONST_OK_FOR_LETTER_P(frame_size
, 'O'))
1195 /* Can handle this with simple add. */
1196 insn
= emit_insn (gen_addsi3 (stack_pointer_rtx
,
1200 /* Scratch reg R9 has the old sp value. */
1201 insn
= emit_move_insn (stack_pointer_rtx
,
1202 gen_rtx_REG (SImode
, GPR_R9
));
1205 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
1206 gen_rtx_SET (VOIDmode
,
1208 gen_rtx_PLUS (SImode
,
1210 GEN_INT (frame_size
))),
1214 if (cfun
->machine
&& cfun
->machine
->eh_stack_adjust
!= NULL_RTX
)
1215 /* Perform the additional bump for __throw. */
1216 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1218 cfun
->machine
->eh_stack_adjust
));
1220 /* Generate the appropriate return. */
1221 if (eh_mode
== EH_EPILOGUE
)
1223 emit_jump_insn (gen_eh_return_internal ());
1226 else if (interrupt_handler
)
1227 emit_jump_insn (gen_return_interrupt_internal ());
1229 emit_jump_insn (gen_return_internal ());
1231 /* Reset state info for each function. */
1232 interrupt_handler
= 0;
1233 current_frame_info
= zero_frame_info
;
1235 cfun
->machine
->eh_stack_adjust
= NULL_RTX
;
1239 /* Generate code for the "eh_return" pattern. */
1241 mt_expand_eh_return (rtx
* operands
)
1243 if (GET_CODE (operands
[0]) != REG
1244 || REGNO (operands
[0]) != EH_RETURN_STACKADJ_REGNO
)
1246 rtx sp
= EH_RETURN_STACKADJ_RTX
;
1248 emit_move_insn (sp
, operands
[0]);
1252 emit_insn (gen_eh_epilogue (operands
[0]));
1255 /* Generate code for the "eh_epilogue" pattern. */
1257 mt_emit_eh_epilogue (rtx
* operands ATTRIBUTE_UNUSED
)
1259 cfun
->machine
->eh_stack_adjust
= EH_RETURN_STACKADJ_RTX
; /* operands[0]; */
1260 mt_expand_epilogue (EH_EPILOGUE
);
1263 /* Handle an "interrupt" attribute. */
1265 mt_handle_interrupt_attribute (tree
* node
,
1267 tree args ATTRIBUTE_UNUSED
,
1268 int flags ATTRIBUTE_UNUSED
,
1269 bool * no_add_attrs
)
1271 if (TREE_CODE (*node
) != FUNCTION_DECL
)
1273 warning (OPT_Wattributes
,
1274 "%qs attribute only applies to functions",
1275 IDENTIFIER_POINTER (name
));
1276 *no_add_attrs
= true;
1282 /* Table of machine attributes. */
1283 const struct attribute_spec mt_attribute_table
[] =
1285 /* name, min, max, decl?, type?, func?, handler */
1286 { "interrupt", 0, 0, false, false, false, mt_handle_interrupt_attribute
},
1287 { NULL
, 0, 0, false, false, false, NULL
}
1290 /* Implement INITIAL_ELIMINATION_OFFSET. */
1292 mt_initial_elimination_offset (int from
, int to
)
1294 mt_compute_frame_size (get_frame_size ());
1296 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1299 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1300 return current_frame_info
.total_size
;
1302 else if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
1303 return current_frame_info
.total_size
;
1309 /* Generate a compare for CODE. Return a brand-new rtx that
1310 represents the result of the compare. */
1313 mt_generate_compare (enum rtx_code code
, rtx op0
, rtx op1
)
1315 rtx scratch0
, scratch1
, const_scratch
;
1323 /* Need to adjust ranges for faking unsigned compares. */
1324 scratch0
= gen_reg_rtx (SImode
);
1325 scratch1
= gen_reg_rtx (SImode
);
1326 const_scratch
= force_reg (SImode
, GEN_INT(MT_MIN_INT
));
1327 emit_insn (gen_addsi3 (scratch0
, const_scratch
, op0
));
1328 emit_insn (gen_addsi3 (scratch1
, const_scratch
, op1
));
1336 /* Adjust compare operator to fake unsigned compares. */
1352 /* Generate the actual compare. */
1353 return gen_rtx_fmt_ee (code
, VOIDmode
, scratch0
, scratch1
);
1356 /* Emit a branch of kind CODE to location LOC. */
1359 mt_emit_cbranch (enum rtx_code code
, rtx loc
, rtx op0
, rtx op1
)
1361 rtx condition_rtx
, loc_ref
;
1363 if (! reg_or_0_operand (op0
, SImode
))
1364 op0
= copy_to_mode_reg (SImode
, op0
);
1366 if (! reg_or_0_operand (op1
, SImode
))
1367 op1
= copy_to_mode_reg (SImode
, op1
);
1369 condition_rtx
= mt_generate_compare (code
, op0
, op1
);
1370 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
1371 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
1372 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
1376 /* Subfunction of the following function. Update the flags of any MEM
1377 found in part of X. */
1380 mt_set_memflags_1 (rtx x
, int in_struct_p
, int volatile_p
)
1384 switch (GET_CODE (x
))
1388 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1389 mt_set_memflags_1 (XVECEXP (x
, 0, i
), in_struct_p
, volatile_p
);
1393 mt_set_memflags_1 (PATTERN (x
), in_struct_p
, volatile_p
);
1397 mt_set_memflags_1 (SET_DEST (x
), in_struct_p
, volatile_p
);
1398 mt_set_memflags_1 (SET_SRC (x
), in_struct_p
, volatile_p
);
1402 MEM_IN_STRUCT_P (x
) = in_struct_p
;
1403 MEM_VOLATILE_P (x
) = volatile_p
;
1404 /* Sadly, we cannot use alias sets because the extra aliasing
1405 produced by the AND interferes. Given that two-byte quantities
1406 are the only thing we would be able to differentiate anyway,
1407 there does not seem to be any point in convoluting the early
1408 out of the alias check. */
1409 /* set_mem_alias_set (x, alias_set); */
1417 /* Look for any MEMs in the current sequence of insns and set the
1418 in-struct, unchanging, and volatile flags from the flags in REF.
1419 If REF is not a MEM, don't do anything. */
1422 mt_set_memflags (rtx ref
)
1425 int in_struct_p
, volatile_p
;
1427 if (GET_CODE (ref
) != MEM
)
1430 in_struct_p
= MEM_IN_STRUCT_P (ref
);
1431 volatile_p
= MEM_VOLATILE_P (ref
);
1433 /* This is only called from mt.md, after having had something
1434 generated from one of the insn patterns. So if everything is
1435 zero, the pattern is already up-to-date. */
1436 if (! in_struct_p
&& ! volatile_p
)
1439 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1440 mt_set_memflags_1 (insn
, in_struct_p
, volatile_p
);
1443 /* Implement SECONDARY_RELOAD_CLASS. */
1445 mt_secondary_reload_class (enum reg_class
class ATTRIBUTE_UNUSED
,
1446 enum machine_mode mode
,
1449 if ((mode
== QImode
&& (!TARGET_BYTE_ACCESS
)) || mode
== HImode
)
1451 if (GET_CODE (x
) == MEM
1452 || (GET_CODE (x
) == REG
&& true_regnum (x
) == -1)
1453 || (GET_CODE (x
) == SUBREG
1454 && (GET_CODE (SUBREG_REG (x
)) == MEM
1455 || (GET_CODE (SUBREG_REG (x
)) == REG
1456 && true_regnum (SUBREG_REG (x
)) == -1))))
1457 return GENERAL_REGS
;
1463 /* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
1466 mt_function_value (tree valtype
, enum machine_mode mode
, tree func_decl ATTRIBUTE_UNUSED
)
1468 if ((mode
) == DImode
|| (mode
) == DFmode
)
1469 return gen_rtx_MEM (mode
, gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
));
1472 mode
= TYPE_MODE (valtype
);
1474 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
/* NOTE(review): the listing of this function is garbled and several
   original lines are missing (the embedded numbering jumps, e.g.
   1564-1579 and everything after 1614); the interior of the non-zero
   CONST_INT source case and the final assignment of the split pieces
   into operands[2..5] are not visible.  Text kept byte-identical;
   only review comments added.  Recover the body from the original
   source before editing.  */
1477 /* Split a move into two smaller pieces.
1478 MODE indicates the reduced mode. OPERANDS[0] is the original destination
1479 OPERANDS[1] is the original src. The new destinations are
1480 OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
1484 mt_split_words (enum machine_mode nmode
,
1485 enum machine_mode omode
,
1488 rtx dl
,dh
; /* src/dest pieces. */
1490 int move_high_first
= 0; /* Assume no overlap. */
/* First switch: build the two destination pieces DL/DH from
   operands[0] (register, subreg, or memory).  */
1492 switch (GET_CODE (operands
[0])) /* Dest. */
1496 if ((GET_CODE (operands
[1]) == REG
1497 || GET_CODE (operands
[1]) == SUBREG
)
1498 && true_regnum (operands
[0]) <= true_regnum (operands
[1]))
1499 move_high_first
= 1;
1501 if (GET_CODE (operands
[0]) == SUBREG
)
1503 dl
= gen_rtx_SUBREG (nmode
, SUBREG_REG (operands
[0]),
1504 SUBREG_BYTE (operands
[0]) + GET_MODE_SIZE (nmode
));
1505 dh
= gen_rtx_SUBREG (nmode
, SUBREG_REG (operands
[0]), SUBREG_BYTE (operands
[0]));
1507 else if (GET_CODE (operands
[0]) == REG
&& ! IS_PSEUDO_P (operands
[0]))
1509 int r
= REGNO (operands
[0]);
1510 dh
= gen_rtx_REG (nmode
, r
);
1511 dl
= gen_rtx_REG (nmode
, r
+ HARD_REGNO_NREGS (r
, nmode
));
1515 dh
= gen_rtx_SUBREG (nmode
, operands
[0], 0);
1516 dl
= gen_rtx_SUBREG (nmode
, operands
[0], GET_MODE_SIZE (nmode
));
1521 switch (GET_CODE (XEXP (operands
[0], 0)))
1527 dl
= operand_subword (operands
[0],
1528 GET_MODE_SIZE (nmode
)/UNITS_PER_WORD
,
,
1530 dh
= operand_subword (operands
[0], 0, 0, omode
);
/* Second switch: build the two source pieces SL/SH from operands[1]
   (register, subreg, constant, or memory).  */
1537 switch (GET_CODE (operands
[1]))
1540 if (! IS_PSEUDO_P (operands
[1]))
1542 int r
= REGNO (operands
[1]);
1544 sh
= gen_rtx_REG (nmode
, r
);
1545 sl
= gen_rtx_REG (nmode
, r
+ HARD_REGNO_NREGS (r
, nmode
));
1549 sh
= gen_rtx_SUBREG (nmode
, operands
[1], 0);
1550 sl
= gen_rtx_SUBREG (nmode
, operands
[1], GET_MODE_SIZE (nmode
));
1555 if (operands
[1] == const0_rtx
)
1556 sh
= sl
= const0_rtx
;
1558 split_double (operands
[1], & sh
, & sl
);
1562 if (operands
[1] == const0_rtx
)
1563 sh
= sl
= const0_rtx
;
1580 sl
= gen_rtx_SUBREG (nmode
,
1581 SUBREG_REG (operands
[1]),
1582 SUBREG_BYTE (operands
[1]) + GET_MODE_SIZE (nmode
));
1583 sh
= gen_rtx_SUBREG (nmode
,
1584 SUBREG_REG (operands
[1]),
1585 SUBREG_BYTE (operands
[1]));
1589 switch (GET_CODE (XEXP (operands
[1], 0)))
1596 sl
= operand_subword (operands
[1],
1597 GET_MODE_SIZE (nmode
)/UNITS_PER_WORD
,
1599 sh
= operand_subword (operands
[1], 0, 0, omode
);
1601 /* Check if the DF load is going to clobber the register
1602 used for the address, and if so make sure that is going
1603 to be the second move. */
1604 if (GET_CODE (dl
) == REG
1606 == true_regnum (XEXP (XEXP (sl
, 0 ), 0)))
1607 move_high_first
= 1;
/* NOTE(review): the tail of the function (presumably assigning
   DL/DH and SL/SH into operands[2..5] in the order selected by
   move_high_first) is missing from this listing.  */
1614 if (move_high_first
)
1631 /* Implement TARGET_MUST_PASS_IN_STACK hook. */
1633 mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED
, tree type
)
1635 return (((type
) != 0
1636 && (TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
1637 || TREE_ADDRESSABLE (type
))));
1641 /* Structures to hold branch information during reorg. */
1642 typedef struct branch_info
1644 rtx insn
; /* The branch insn. */
1646 struct branch_info
*next
;
1649 typedef struct label_info
1651 rtx label
; /* The label. */
1652 branch_info
*branches
; /* branches to this label. */
1653 struct label_info
*next
;
1656 /* Chain of labels found in current function, used during reorg. */
1657 static label_info
*mt_labels
;
1659 /* If *X is a label, add INSN to the list of branches for that
1663 mt_add_branches (rtx
*x
, void *insn
)
1665 if (GET_CODE (*x
) == LABEL_REF
)
1667 branch_info
*branch
= xmalloc (sizeof (*branch
));
1668 rtx label
= XEXP (*x
, 0);
1671 for (info
= mt_labels
; info
; info
= info
->next
)
1672 if (info
->label
== label
)
1677 info
= xmalloc (sizeof (*info
));
1678 info
->next
= mt_labels
;
1681 info
->label
= label
;
1682 info
->branches
= NULL
;
1685 branch
->next
= info
->branches
;
1686 info
->branches
= branch
;
1687 branch
->insn
= insn
;
1692 /* If BRANCH has a filled delay slot, check if INSN is dependent upon
1693 it. If so, undo the delay slot fill. Returns the next insn, if
1694 we patch out the branch. Returns the branch insn, if we cannot
1695 patch out the branch (due to anti-dependency in the delay slot).
1696 In that case, the caller must insert nops at the branch target. */
1699 mt_check_delay_slot (rtx branch
, rtx insn
)
1706 gcc_assert (GET_CODE (PATTERN (branch
)) == SEQUENCE
);
1707 if (INSN_DELETED_P (branch
))
1709 slot
= XVECEXP (PATTERN (branch
), 0, 1);
1711 tmp
= PATTERN (insn
);
1712 note_stores (PATTERN (slot
), insn_dependent_p_1
, &tmp
);
1714 /* Not dependent. */
1717 /* Undo the delay slot. */
1718 jmp
= XVECEXP (PATTERN (branch
), 0, 0);
1720 tmp
= PATTERN (jmp
);
1721 note_stores (PATTERN (slot
), insn_dependent_p_1
, &tmp
);
1723 /* Anti dependent. */
1726 p
= PREV_INSN (branch
);
1727 NEXT_INSN (p
) = slot
;
1728 PREV_INSN (slot
) = p
;
1729 NEXT_INSN (slot
) = jmp
;
1730 PREV_INSN (jmp
) = slot
;
1731 NEXT_INSN (jmp
) = branch
;
1732 PREV_INSN (branch
) = jmp
;
1733 XVECEXP (PATTERN (branch
), 0, 0) = NULL_RTX
;
1734 XVECEXP (PATTERN (branch
), 0, 1) = NULL_RTX
;
1735 delete_insn (branch
);
/* NOTE(review): this listing of mt_reorg_hazard is garbled and many
   original lines are missing (the embedded numbering jumps repeatedly,
   e.g. 1754-1756, 1761-1766, 1789-1795, 1832-1834, 1838-1850, and the
   whole 1906-1926 nop-insertion/rescan region is only partially
   present).  Text kept byte-identical; only review comments added.
   Recover the body from the original source before editing.  */
1739 /* Insert nops to satisfy pipeline constraints. We only deal with ms2
1740 constraints here. Earlier CPUs are dealt with by inserting nops with
1741 final_prescan (but that can lead to inferior code, and is
1742 impractical with ms2's JAL hazard).
1744 ms2 dynamic constraints
1745 1) a load and a following use must be separated by one insn
1746 2) an insn and a following dependent call must be separated by two insns
1748 only arith insns are placed in delay slots so #1 cannot happen with
1749 a load in a delay slot. #2 can happen with an arith insn in the
1753 mt_reorg_hazard (void)
/* Pass 1: record, via mt_add_branches, every branch in the function
   keyed by its target label (tablejumps walked specially).  */
1757 /* Find all the branches */
1758 for (insn
= get_insns ();
1760 insn
= NEXT_INSN (insn
))
1767 jmp
= PATTERN (insn
);
1769 if (GET_CODE (jmp
) != SEQUENCE
)
1770 /* If it's not got a filled delay slot, then it can't
1774 jmp
= XVECEXP (jmp
, 0, 0);
1776 if (recog_memoized (jmp
) == CODE_FOR_tablejump
)
1777 for (jmp
= XEXP (XEXP (XVECEXP (PATTERN (jmp
), 0, 1), 0), 0);
1778 !JUMP_TABLE_DATA_P (jmp
);
1779 jmp
= NEXT_INSN (jmp
))
1782 for_each_rtx (&PATTERN (jmp
), mt_add_branches
, insn
);
/* Pass 2: walk the insns looking for load-use and call hazards,
   inserting nops as needed.  */
1785 /* Now scan for dependencies. */
1786 for (insn
= get_insns ();
1787 insn
&& !INSN_P (insn
);
1788 insn
= NEXT_INSN (insn
))
1796 enum attr_type attr
;
1798 gcc_assert (INSN_P (insn
) && !INSN_DELETED_P (insn
));
1799 for (next
= NEXT_INSN (insn
);
1801 next
= NEXT_INSN (next
))
1805 if (GET_CODE (PATTERN (next
)) != USE
)
1810 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1811 jmp
= XVECEXP (PATTERN (insn
), 0, 0);
1813 attr
= recog_memoized (jmp
) >= 0 ? get_attr_type (jmp
) : TYPE_UNKNOWN
;
1815 if (next
&& attr
== TYPE_LOAD
)
1817 /* A load. See if NEXT is dependent, and if so insert a
1820 tmp
= PATTERN (next
);
1821 if (GET_CODE (tmp
) == SEQUENCE
)
1822 tmp
= PATTERN (XVECEXP (tmp
, 0, 0));
1823 note_stores (PATTERN (insn
), insn_dependent_p_1
, &tmp
);
1825 emit_insn_after (gen_nop (), insn
);
/* Call hazard: look back over the previous two dynamic insns,
   undoing delay-slot fills via mt_check_delay_slot where needed.  */
1828 if (attr
== TYPE_CALL
)
1830 /* A call. Make sure we're not dependent on either of the
1831 previous two dynamic instructions. */
1835 rtx rescan
= NULL_RTX
;
1837 for (count
= 2; count
&& !nops
;)
1841 prev
= PREV_INSN (prev
);
1844 /* If we reach the start of the function, we must
1845 presume the caller set the address in the delay
1846 slot of the call instruction. */
1851 if (BARRIER_P (prev
))
1855 /* Look at branches to this label. */
1857 branch_info
*branch
;
1859 for (label
= mt_labels
;
1861 label
= label
->next
)
1862 if (label
->label
== prev
)
1864 for (branch
= label
->branches
;
1866 branch
= branch
->next
)
1868 tmp
= mt_check_delay_slot (branch
->insn
, jmp
);
1870 if (tmp
== branch
->insn
)
1876 if (tmp
&& branch
->insn
== next
)
1883 if (!INSN_P (prev
) || GET_CODE (PATTERN (prev
)) == USE
)
1886 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
1888 /* Look at the delay slot. */
1889 tmp
= mt_check_delay_slot (prev
, jmp
);
1895 type
= (INSN_CODE (prev
) >= 0 ? get_attr_type (prev
)
1897 if (type
== TYPE_CALL
|| type
== TYPE_BRANCH
)
1900 if (type
== TYPE_LOAD
1901 || type
== TYPE_ARITH
1902 || type
== TYPE_COMPLEX
)
1904 tmp
= PATTERN (jmp
);
1905 note_stores (PATTERN (prev
), insn_dependent_p_1
, &tmp
);
1913 if (INSN_CODE (prev
) >= 0)
1918 for (next
= NEXT_INSN (rescan
);
1919 next
&& !INSN_P (next
);
1920 next
= NEXT_INSN (next
))
1923 emit_insn_before (gen_nop (), insn
);
/* Teardown: release the per-function label/branch records built in
   pass 1.  */
1927 /* Free the data structures. */
1930 label_info
*label
= mt_labels
;
1931 branch_info
*branch
, *next
;
1933 mt_labels
= label
->next
;
1934 for (branch
= label
->branches
; branch
; branch
= next
)
1936 next
= branch
->next
;
1943 /* Fixup the looping instructions, do delayed branch scheduling, fixup
1944 scheduling hazards. */
1947 mt_machine_reorg (void)
1949 if (mt_flag_delayed_branch
)
1950 dbr_schedule (get_insns (), dump_file
);
1954 /* Force all instructions to be split into their final form. */
1955 split_all_insns_noflow ();
/* NOTE(review): garbled listing of the target-hook table; text kept
   byte-identical, only comments added.  Each #undef/#define pair
   overrides a default hook from target-def.h with this port's
   implementation; TARGET_INITIALIZER then collects them all into
   the targetm vector.  */
1960 /* Initialize the GCC target structure. */
1961 const struct attribute_spec mt_attribute_table
[];
1963 #undef TARGET_ATTRIBUTE_TABLE
1964 #define TARGET_ATTRIBUTE_TABLE mt_attribute_table
/* Argument-passing and return-value hooks.  */
1965 #undef TARGET_STRUCT_VALUE_RTX
1966 #define TARGET_STRUCT_VALUE_RTX mt_struct_value_rtx
1967 #undef TARGET_PROMOTE_PROTOTYPES
1968 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
1969 #undef TARGET_PASS_BY_REFERENCE
1970 #define TARGET_PASS_BY_REFERENCE mt_pass_by_reference
1971 #undef TARGET_MUST_PASS_IN_STACK
1972 #define TARGET_MUST_PASS_IN_STACK mt_pass_in_stack
1973 #undef TARGET_ARG_PARTIAL_BYTES
1974 #define TARGET_ARG_PARTIAL_BYTES mt_arg_partial_bytes
1975 #undef TARGET_SETUP_INCOMING_VARARGS
1976 #define TARGET_SETUP_INCOMING_VARARGS mt_setup_incoming_varargs
/* Machine-dependent reorg pass (delay slots, ms2 hazards).  */
1977 #undef TARGET_MACHINE_DEPENDENT_REORG
1978 #define TARGET_MACHINE_DEPENDENT_REORG mt_machine_reorg
1980 struct gcc_target targetm
= TARGET_INITIALIZER
;