gnu/dist/gcc4/gcc/config/mt/mt.c
/* Target definitions for the MorphoRISC1
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "integrate.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "tm_p.h"
#include "ggc.h"
#include "insn-flags.h"
#include "obstack.h"
#include "except.h"
#include "target.h"
#include "target-def.h"
/* Frame pointer register mask.  */
#define FP_MASK (1 << (GPR_FP))

/* Link register mask.  */
#define LINK_MASK (1 << (GPR_LINK))

/* Given a SIZE in bytes, advance to the next word.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
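
/* A worked example, assuming the port's 4-byte UNITS_PER_WORD:
   ROUND_ADVANCE (1) through ROUND_ADVANCE (4) all yield 1 word,
   and ROUND_ADVANCE (5) yields 2.  */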
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Set if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  struct rtx_def * eh_stack_adjust;
  int interrupt_handler;
};

/* Define the information needed to generate branch and scc insns.
   This is stored from the compare operation.  */
struct rtx_def * mt_compare_op0;
struct rtx_def * mt_compare_op1;

/* Current frame information calculated by compute_frame_size.  */
struct mt_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
struct mt_frame_info zero_frame_info;

/* mt doesn't have unsigned compares; we need a library call for this.  */
struct rtx_def * mt_ucmpsi3_libcall;

static int mt_flag_delayed_branch;
static rtx
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, RETVAL_REGNUM);
}

/* Implement RETURN_ADDR_RTX.  */
rtx
mt_return_addr_rtx (int count)
{
  if (count != 0)
    return NULL_RTX;

  return get_hard_reg_initial_val (Pmode, GPR_LINK);
}
/* The following variable holds the number of nops required between the
   current instruction and the next instruction to avoid any pipeline
   hazards.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";

/* Implement ASM_OUTPUT_OPCODE.  */
const char *
mt_asm_output_opcode (FILE *f, const char *ptr)
{
  if (mt_nops_required)
    fprintf (f, ";# need %d nops because of %s\n\t",
	     mt_nops_required, mt_nop_reasons);

  while (mt_nops_required)
    {
      fprintf (f, "nop\n\t");
      --mt_nops_required;
    }

  return ptr;
}
/* Given an insn, return whether it's a memory operation or a branch
   operation, otherwise return TYPE_ARITH.  */
static enum attr_type
mt_get_attr_type (rtx complete_insn)
{
  rtx insn = PATTERN (complete_insn);

  if (JUMP_P (complete_insn))
    return TYPE_BRANCH;
  if (CALL_P (complete_insn))
    return TYPE_BRANCH;

  if (GET_CODE (insn) != SET)
    return TYPE_ARITH;

  if (SET_DEST (insn) == pc_rtx)
    return TYPE_BRANCH;

  if (GET_CODE (SET_DEST (insn)) == MEM)
    return TYPE_STORE;

  if (GET_CODE (SET_SRC (insn)) == MEM)
    return TYPE_LOAD;

  return TYPE_ARITH;
}
/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}

/* Return true if anything in insn X is (anti,output,true)
   dependent on anything in insn Y.  */

static bool
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return false;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return true;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}

/* Return true if anything in insn X is true dependent on anything in
   insn Y.  */
static bool
insn_true_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return false;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}
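
/* How the helper works: note_stores invokes insn_dependent_p_1 once
   for each location stored by the scanned pattern, and the callback
   clears *pinsn as soon as a stored location is mentioned in the other
   insn's pattern.  A NULL result after the walk therefore signals a
   dependency.  */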
/* The following determines the number of nops that need to be
   inserted between the previous instruction and the current instruction
   to avoid pipeline hazards on the mt processor.  Remember that
   the function is not called for asm insns.  */

void
mt_final_prescan_insn (rtx insn,
		       rtx * opvec ATTRIBUTE_UNUSED,
		       int noperands ATTRIBUTE_UNUSED)
{
  rtx prev_i;
  enum attr_type prev_attr;

  mt_nops_required = 0;
  mt_nop_reasons = "";

  /* ms2 constraints are dealt with in reorg.  */
  if (TARGET_MS2)
    return;

  /* Only worry about real instructions.  */
  if (! INSN_P (insn))
    return;

  /* Find the previous real instruction.  */
  for (prev_i = PREV_INSN (insn);
       prev_i != NULL
	 && (! INSN_P (prev_i)
	     || GET_CODE (PATTERN (prev_i)) == USE
	     || GET_CODE (PATTERN (prev_i)) == CLOBBER);
       prev_i = PREV_INSN (prev_i))
    {
      /* If we meet a barrier, there is no flow through here.  */
      if (BARRIER_P (prev_i))
	return;
    }

  /* If there isn't one then there is nothing that we need do.  */
  if (prev_i == NULL || ! INSN_P (prev_i))
    return;

  prev_attr = mt_get_attr_type (prev_i);

  /* Delay slots are already taken care of by delayed branch scheduling.  */
  if (prev_attr == TYPE_BRANCH)
    return;

  switch (mt_get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_STORE:
      /* Avoid consecutive memory operations.  */
      if ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
	  && TARGET_MS1_64_001)
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "consecutive mem ops";
	}
      /* Fall through.  */

    case TYPE_ARITH:
    case TYPE_COMPLEX:
      /* One cycle of delay is required between load
	 and the dependent arithmetic instruction.  */
      if (prev_attr == TYPE_LOAD
	  && insn_true_dependent_p (prev_i, insn))
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "load->arith dependency delay";
	}
      break;

    case TYPE_BRANCH:
      if (insn_dependent_p (prev_i, insn))
	{
	  if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
	    {
	      /* One cycle of delay between arith
		 instructions and branch dependent on arith.  */
	      mt_nops_required = 1;
	      mt_nop_reasons = "arith->branch dependency delay";
	    }
	  else if (prev_attr == TYPE_LOAD)
	    {
	      /* Two cycles of delay are required
		 between load and dependent branch.  */
	      if (TARGET_MS1_64_001)
		mt_nops_required = 2;
	      else
		mt_nops_required = 1;
	      mt_nop_reasons = "load->branch dependency delay";
	    }
	}
      break;

    default:
      fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
      break;
    }
}
/* Print debugging information for a frame.  */
static void
mt_debug_stack (struct mt_frame_info * info)
{
  int regno;

  if (!info)
    {
      error ("info pointer NULL");
      gcc_unreachable ();
    }

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  fprintf (stderr, "\ttotal_size = %d\n", info->total_size);
  fprintf (stderr, "\tpretend_size = %d\n", info->pretend_size);
  fprintf (stderr, "\targs_size = %d\n", info->args_size);
  fprintf (stderr, "\textra_size = %d\n", info->extra_size);
  fprintf (stderr, "\treg_size = %d\n", info->reg_size);
  fprintf (stderr, "\tvar_size = %d\n", info->var_size);
  fprintf (stderr, "\tframe_size = %d\n", info->frame_size);
  fprintf (stderr, "\treg_mask = 0x%x\n", info->reg_mask);
  fprintf (stderr, "\tsave_fp = %d\n", info->save_fp);
  fprintf (stderr, "\tsave_lr = %d\n", info->save_lr);
  fprintf (stderr, "\tinitialized = %d\n", info->initialized);
  fprintf (stderr, "\tsaved registers =");

  /* Print out reg_mask in a more readable format.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    if ((1 << regno) & info->reg_mask)
      fprintf (stderr, " %s", reg_names[regno]);

  putc ('\n', stderr);
  fflush (stderr);
}
/* Print a memory address as an operand to reference that memory location.  */

static void
mt_print_operand_simple_address (FILE * file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  else
    switch (GET_CODE (addr))
      {
      case REG:
	fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
	break;

      case PLUS:
	{
	  rtx reg = 0;
	  rtx offset = 0;
	  rtx arg0 = XEXP (addr, 0);
	  rtx arg1 = XEXP (addr, 1);

	  if (GET_CODE (arg0) == REG)
	    {
	      reg = arg0;
	      offset = arg1;
	      if (GET_CODE (offset) == REG)
		fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
	    }
	  else if (GET_CODE (arg1) == REG)
	    reg = arg1, offset = arg0;
	  else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	    {
	      fprintf (file, "%s, #", reg_names [GPR_R0]);
	      output_addr_const (file, addr);
	      break;
	    }
	  fprintf (file, "%s, #", reg_names [REGNO (reg)]);
	  output_addr_const (file, offset);
	  break;
	}

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST_INT:
      case CONST:
	output_addr_const (file, addr);
	break;

      default:
	fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
	break;
      }
}
/* Implement PRINT_OPERAND_ADDRESS.  */
void
mt_print_operand_address (FILE * file, rtx addr)
{
  if (GET_CODE (addr) == AND
      && GET_CODE (XEXP (addr, 1)) == CONST_INT
      && INTVAL (XEXP (addr, 1)) == -3)
    mt_print_operand_simple_address (file, XEXP (addr, 0));
  else
    mt_print_operand_simple_address (file, addr);
}
/* Implement PRINT_OPERAND.  */
void
mt_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case '#':
      /* Output a nop if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
	fputs ("\n\tnop", file);
      return;

    case 'H':
      fprintf (file, "#%%hi16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'L':
      fprintf (file, "#%%lo16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'N':
      fprintf (file, "#%ld", ~INTVAL (x));
      return;

    case 'z':
      if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
	{
	  fputs (reg_names[GPR_R0], file);
	  return;
	}
      break;

    case 0:
      /* Handled below.  */
      break;

    default:
      /* output_operand_lossage ("mt_print_operand: unknown code"); */
      fprintf (file, "unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case CONST:
    case CONST_INT:
      fprintf (file, "#%ld", INTVAL (x));
      break;

    case MEM:
      mt_print_operand_address (file, XEXP (x, 0));
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    default:
      fprintf (file, "Unknown code: %d", GET_CODE (x));
      break;
    }

  return;
}
/* Implement INIT_CUMULATIVE_ARGS.  */
void
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
			 tree fndecl ATTRIBUTE_UNUSED, int incoming)
{
  *cum = 0;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\nmt_init_cumulative_args:");

      if (incoming)
	fputs (" incoming", stderr);

      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " return = %s,",
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}

      if (libname && GET_CODE (libname) == SYMBOL_REF)
	fprintf (stderr, " libname = %s", XSTR (libname, 0));

      if (cfun->returns_struct)
	fprintf (stderr, " return-struct");

      putc ('\n', stderr);
    }
}
/* Compute the slot number to pass an argument in.
   Returns the slot number or -1 if passing on the stack.

   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).
   INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
   *PREGNO records the register number to use if scalar type.  */

static int
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
			enum machine_mode mode,
			tree type,
			int named ATTRIBUTE_UNUSED,
			int incoming_p ATTRIBUTE_UNUSED,
			int * pregno)
{
  int regbase = FIRST_ARG_REGNUM;
  int slotno = * cum;

  if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
    return -1;

  if (slotno >= MT_NUM_ARG_REGS)
    return -1;

  * pregno = regbase + slotno;

  return slotno;
}
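
/* Example: if *cum has already been advanced past two word-sized
   arguments, slotno is 2 and *pregno becomes FIRST_ARG_REGNUM + 2;
   once *cum reaches MT_NUM_ARG_REGS every further argument is passed
   on the stack (slot number -1).  */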
/* Implement FUNCTION_ARG.  */
rtx
mt_function_arg (const CUMULATIVE_ARGS * cum,
		 enum machine_mode mode,
		 tree type,
		 int named,
		 int incoming_p)
{
  int slotno, regno;
  rtx reg;

  slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, &regno);

  if (slotno == -1)
    reg = NULL_RTX;
  else
    reg = gen_rtx_REG (mode, regno);

  return reg;
}
/* Implement FUNCTION_ARG_ADVANCE.  */
void
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
			 enum machine_mode mode,
			 tree type,
			 int named)
{
  int slotno, regno;

  /* We pass 0 for incoming_p here, it doesn't matter.  */
  slotno = mt_function_arg_slotno (cum, mode, type, named, 0, &regno);

  * cum += (mode != BLKmode
	    ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
	    : ROUND_ADVANCE (int_size_in_bytes (type)));

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
	     *cum, GET_MODE_NAME (mode), named,
	     (*cum) * UNITS_PER_WORD);
}
/* Implement hook TARGET_ARG_PARTIAL_BYTES.

   Returns the number of bytes at the beginning of an argument that
   must be put in registers.  The value must be zero for arguments
   that are passed entirely in registers or that are entirely pushed
   on the stack.  */
static int
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
		      enum machine_mode mode,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  int cum = * pcum;
  int words;

  if (mode == BLKmode)
    words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
	     / UNITS_PER_WORD);
  else
    words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
      && cum < MT_NUM_ARG_REGS
      && (cum + words) > MT_NUM_ARG_REGS)
    {
      int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;

      if (TARGET_DEBUG)
	fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
      return bytes;
    }

  return 0;
}
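
/* Example: a three-word BLKmode argument arriving with one register
   slot left (cum == MT_NUM_ARG_REGS - 1) straddles the boundary, so
   the function returns UNITS_PER_WORD: one word travels in the last
   register and the remaining two words go on the stack.  */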
/* Implement TARGET_PASS_BY_REFERENCE hook.  */
static bool
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
		      enum machine_mode mode ATTRIBUTE_UNUSED,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
}

/* Implement FUNCTION_ARG_BOUNDARY.  */
int
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return BITS_PER_WORD;
}
/* Implement REG_OK_FOR_BASE_P.  */
int
mt_reg_ok_for_base_p (rtx x, int strict)
{
  if (strict)
    return (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
  return 1;
}
/* Helper function of mt_legitimate_address_p.  Return true if XINSN
   is a simple address, otherwise false.  */
static bool
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx xinsn, int strict)
{
  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
	       strict ? "" : "not ");
      debug_rtx (xinsn);
    }

  if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
    return true;

  if (GET_CODE (xinsn) == PLUS
      && GET_CODE (XEXP (xinsn, 0)) == REG
      && mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && SMALL_INT (XEXP (xinsn, 1)))
    return true;

  return false;
}

/* Helper function of GO_IF_LEGITIMATE_ADDRESS.  Return nonzero if
   XINSN is a legitimate address on MT.  */
int
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
{
  if (mt_legitimate_simple_address_p (mode, xinsn, strict))
    return 1;

  if ((mode) == SImode
      && GET_CODE (xinsn) == AND
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && INTVAL (XEXP (xinsn, 1)) == -3)
    return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
  else
    return 0;
}
/* Return the truth value of whether OP can be used as an operand where
   a register or 16-bit unsigned integer is needed.  */
int
uns_arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
    return 1;

  return register_operand (op, mode);
}

/* Return the truth value of whether OP can be used as an operand where
   a 16-bit integer is needed.  */
int
arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
    return 1;

  return register_operand (op, mode);
}

/* Return the truth value of whether OP is a register or the constant 0.  */
int
reg_or_0_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return INTVAL (op) == 0;

    case REG:
    case SUBREG:
      return register_operand (op, mode);

    default:
      break;
    }

  return 0;
}

/* Return the truth value of whether OP is a constant that requires two
   loads to put in a register.  */
int
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
    return 1;

  return 0;
}

/* Return the truth value of whether OP is a constant that requires only
   one load to put in a register.  */
int
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (big_const_operand (op, mode)
      || GET_CODE (op) == CONST
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == SYMBOL_REF)
    return 0;

  return 1;
}
/* True if the current function is an interrupt handler
   (either via #pragma or an attribute specification).  */
int interrupt_handler;
enum processor_type mt_cpu;

static struct machine_function *
mt_init_machine_status (void)
{
  struct machine_function *f;

  f = ggc_alloc_cleared (sizeof (struct machine_function));

  return f;
}
/* Implement OVERRIDE_OPTIONS.  */
void
mt_override_options (void)
{
  if (mt_cpu_string != NULL)
    {
      if (!strcmp (mt_cpu_string, "ms1-64-001"))
	mt_cpu = PROCESSOR_MS1_64_001;
      else if (!strcmp (mt_cpu_string, "ms1-16-002"))
	mt_cpu = PROCESSOR_MS1_16_002;
      else if (!strcmp (mt_cpu_string, "ms1-16-003"))
	mt_cpu = PROCESSOR_MS1_16_003;
      else if (!strcmp (mt_cpu_string, "ms2"))
	mt_cpu = PROCESSOR_MS2;
      else
	error ("bad value (%s) for -march= switch", mt_cpu_string);
    }
  else
    mt_cpu = PROCESSOR_MS1_64_001;

  if (flag_exceptions)
    {
      flag_omit_frame_pointer = 0;
      flag_gcse = 0;
    }

  /* We do delayed branch filling in machine dependent reorg.  */
  mt_flag_delayed_branch = flag_delayed_branch;
  flag_delayed_branch = 0;

  init_machine_status = mt_init_machine_status;
}
/* Do what is necessary for `va_start'.  We look at the current function
   to determine if stdarg or varargs is used and return the address of the
   first unnamed parameter.  */

static void
mt_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			   enum machine_mode mode ATTRIBUTE_UNUSED,
			   tree type ATTRIBUTE_UNUSED,
			   int *pretend_size, int no_rtl)
{
  int regno;
  int regs = MT_NUM_ARG_REGS - *cum;

  *pretend_size = regs < 0 ? 0 : GET_MODE_SIZE (SImode) * regs;

  if (no_rtl)
    return;

  for (regno = *cum; regno < MT_NUM_ARG_REGS; regno++)
    {
      rtx reg = gen_rtx_REG (SImode, FIRST_ARG_REGNUM + regno);
      rtx slot = gen_rtx_PLUS (Pmode,
			       gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
			       GEN_INT (UNITS_PER_WORD * regno));

      emit_move_insn (gen_rtx_MEM (SImode, slot), reg);
    }
}
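
/* Each still-unnamed argument register is spilled to its canonical
   word slot relative to the incoming argument pointer, so va_arg can
   later walk register and stack arguments as one contiguous array.  */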
/* Returns the number of bytes offset between the frame pointer and the stack
   pointer for the current function.  SIZE is the number of bytes of space
   needed for local variables.  */

unsigned int
mt_compute_frame_size (int size)
{
  int regno;
  unsigned int total_size;
  unsigned int var_size;
  unsigned int args_size;
  unsigned int pretend_size;
  unsigned int extra_size;
  unsigned int reg_size;
  unsigned int frame_size;
  unsigned int reg_mask;

  var_size = size;
  args_size = current_function_outgoing_args_size;
  pretend_size = current_function_pretend_args_size;
  extra_size = FIRST_PARM_OFFSET (0);
  total_size = extra_size + pretend_size + args_size + var_size;
  reg_size = 0;
  reg_mask = 0;

  /* Calculate space needed for registers.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if (MUST_SAVE_REGISTER (regno))
	{
	  reg_size += UNITS_PER_WORD;
	  reg_mask |= 1 << regno;
	}
    }

  current_frame_info.save_fp = (regs_ever_live [GPR_FP]
				|| frame_pointer_needed
				|| interrupt_handler);
  current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
				|| profile_flag
				|| interrupt_handler);

  reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
    * UNITS_PER_WORD;
  total_size += reg_size;
  total_size = ((total_size + 3) & ~3);

  frame_size = total_size;

  /* Save computed information.  */
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size = var_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_size = reg_size;
  current_frame_info.frame_size = args_size + var_size;
  current_frame_info.total_size = total_size;
  current_frame_info.extra_size = extra_size;
  current_frame_info.reg_mask = reg_mask;
  current_frame_info.initialized = reload_completed;

  return total_size;
}
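
/* Layout implied by the computation above (the stack grows downward):
   the new SP ends up at old SP - total_size, where total_size is
   extra_size + pretend_size + args_size + var_size + reg_size rounded
   up to a multiple of 4.  mt_emit_save_regs below stores FP, then LR,
   then the remaining call-saved registers starting at
   new SP + total_size - UNITS_PER_WORD and working downward.  */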
/* Emit code to save or restore REG in the stack slot MEM, depending on
   DIRECTION.  STACK_OFFSET is the offset from the SP where the save
   will happen.  This function sets the REG_FRAME_RELATED_EXPR note
   accordingly.  */
static void
mt_emit_save_restore (enum save_direction direction,
		      rtx reg, rtx mem, int stack_offset)
{
  if (direction == FROM_PROCESSOR_TO_MEM)
    {
      rtx insn;

      insn = emit_move_insn (mem, reg);
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST
	(REG_FRAME_RELATED_EXPR,
	 gen_rtx_SET (VOIDmode,
		      gen_rtx_MEM (SImode,
				   gen_rtx_PLUS (SImode,
						 stack_pointer_rtx,
						 GEN_INT (stack_offset))),
		      reg),
	 REG_NOTES (insn));
    }
  else
    emit_move_insn (reg, mem);
}
/* Emit code to save the frame pointer in the prologue and restore the
   frame pointer in the epilogue.  */

static void
mt_emit_save_fp (enum save_direction direction,
		 struct mt_frame_info info)
{
  rtx base_reg;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_FP),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }
}
/* Emit code to save registers in the prologue and restore registers
   in the epilogue.  */

static void
mt_emit_save_regs (enum save_direction direction,
		   struct mt_frame_info info)
{
  rtx base_reg;
  int regno;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      /* This just records the space for it; the actual move is
	 generated in mt_emit_save_fp ().  */
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
    }

  if (info.save_lr)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_LINK),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }

  /* Save any needed call-saved regs.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if ((reg_mask & (1 << regno)) != 0)
	{
	  offset -= UNITS_PER_WORD;
	  stack_offset -= UNITS_PER_WORD;
	  mt_emit_save_restore
	    (direction, gen_rtx_REG (SImode, regno),
	     gen_rtx_MEM (SImode,
			  gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	     stack_offset);
	}
    }
}
/* Return true if FUNC is a function with the 'interrupt' attribute.  */
static bool
mt_interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return false;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}
/* Generate prologue code.  */
void
mt_expand_prologue (void)
{
  rtx size_rtx, insn;
  unsigned int frame_size;

  if (mt_interrupt_function_p (current_function_decl))
    {
      interrupt_handler = 1;
      if (cfun->machine)
	cfun->machine->interrupt_handler = 1;
    }

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
    }

  /* Allocate stack for this frame.  */
  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    size_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_MINUS (SImode,
							 stack_pointer_rtx,
							 GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  /* Set R9 to point to old sp if required for access to register save
     area.  */
  if (current_frame_info.reg_size != 0
      && !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));

  /* Save the frame pointer.  */
  mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Save the registers.  */
  mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag)
    emit_insn (gen_blockage ());
}
/* Implement EPILOGUE_USES.  */
int
mt_epilogue_uses (int regno)
{
  if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
    return 1;
  return regno == GPR_LINK;
}
/* Generate epilogue.  EH_MODE is NORMAL_EPILOGUE when generating a
   function epilogue, or EH_EPILOGUE when generating an EH
   epilogue.  */
void
mt_expand_epilogue (enum epilogue_type eh_mode)
{
  rtx size_rtx, insn;
  unsigned frame_size;

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (& current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
      /* Set R9 to point to old sp if required for access to register
	 save area.  */
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
    }

  /* Restore sp if there was some possible change to it.  */
  if (frame_pointer_needed)
    insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

  /* Restore the registers.  */
  mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
  mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);

  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
	/* Can handle this with simple add.  */
	insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
				      stack_pointer_rtx,
				      size_rtx));
      else
	/* Scratch reg R9 has the old sp value.  */
	insn = emit_move_insn (stack_pointer_rtx,
			       gen_rtx_REG (SImode, GPR_R9));

      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_PLUS (SImode,
							stack_pointer_rtx,
							GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
    /* Perform the additional bump for __throw.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx,
			   stack_pointer_rtx,
			   cfun->machine->eh_stack_adjust));

  /* Generate the appropriate return.  */
  if (eh_mode == EH_EPILOGUE)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_return_interrupt_internal ());
  else
    emit_jump_insn (gen_return_internal ());

  /* Reset state info for each function.  */
  interrupt_handler = 0;
  current_frame_info = zero_frame_info;
  if (cfun->machine)
    cfun->machine->eh_stack_adjust = NULL_RTX;
}
/* Generate code for the "eh_return" pattern.  */
void
mt_expand_eh_return (rtx * operands)
{
  if (GET_CODE (operands[0]) != REG
      || REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
    {
      rtx sp = EH_RETURN_STACKADJ_RTX;

      emit_move_insn (sp, operands[0]);
      operands[0] = sp;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}

/* Generate code for the "eh_epilogue" pattern.  */
void
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
  mt_expand_epilogue (EH_EPILOGUE);
}
/* Handle an "interrupt" attribute.  */
static tree
mt_handle_interrupt_attribute (tree * node,
			       tree name,
			       tree args ATTRIBUTE_UNUSED,
			       int flags ATTRIBUTE_UNUSED,
			       bool * no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes,
	       "%qs attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Table of machine attributes.  */
const struct attribute_spec mt_attribute_table[] =
{
  /* name,       min, max, decl?, type?, func?, handler  */
  { "interrupt", 0,   0,   false, false, false, mt_handle_interrupt_attribute },
  { NULL,        0,   0,   false, false, false, NULL }
};
/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
mt_initial_elimination_offset (int from, int to)
{
  mt_compute_frame_size (get_frame_size ());

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;

  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return current_frame_info.total_size;

  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return current_frame_info.total_size;

  else
    gcc_unreachable ();
}
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
{
  rtx scratch0, scratch1, const_scratch;

  switch (code)
    {
    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Need to adjust ranges for faking unsigned compares.  */
      scratch0 = gen_reg_rtx (SImode);
      scratch1 = gen_reg_rtx (SImode);
      const_scratch = force_reg (SImode, GEN_INT (MT_MIN_INT));
      emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
      emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
      break;
    default:
      scratch0 = op0;
      scratch1 = op1;
      break;
    }

  /* Adjust compare operator to fake unsigned compares.  */
  switch (code)
    {
    case GTU:
      code = GT; break;
    case LTU:
      code = LT; break;
    case GEU:
      code = GE; break;
    case LEU:
      code = LE; break;
    default:
      /* do nothing */
      break;
    }

  /* Generate the actual compare.  */
  return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
}
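
/* Why the biasing works: adding MT_MIN_INT (the most negative SImode
   value, presumably 0x80000000, defined in mt.h) flips the sign bit,
   mapping unsigned 0 to the smallest signed value and 0xffffffff to
   the largest while preserving relative order.  For example,
   1 LTU 0xffffffff becomes 0x80000001 LT 0x7fffffff, which holds as a
   signed compare.  */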
/* Emit a branch of kind CODE to location LOC.  */

void
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
{
  rtx condition_rtx, loc_ref;

  if (! reg_or_0_operand (op0, SImode))
    op0 = copy_to_mode_reg (SImode, op0);

  if (! reg_or_0_operand (op1, SImode))
    op1 = copy_to_mode_reg (SImode, op1);

  condition_rtx = mt_generate_compare (code, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
						     loc_ref, pc_rtx)));
}
/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static void
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
      break;

    case INSN:
      mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
      break;

    case SET:
      mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
      mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
	 produced by the AND interferes.  Given that two-byte quantities
	 are the only thing we would be able to differentiate anyway,
	 there does not seem to be any point in convoluting the early
	 out of the alias check.  */
      /* set_mem_alias_set (x, alias_set); */
      break;

    default:
      break;
    }
}

/* Look for any MEMs in the current sequence of insns and set the
   in-struct, unchanging, and volatile flags from the flags in REF.
   If REF is not a MEM, don't do anything.  */

void
mt_set_memflags (rtx ref)
{
  rtx insn;
  int in_struct_p, volatile_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);

  /* This is only called from mt.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    mt_set_memflags_1 (insn, in_struct_p, volatile_p);
}
/* Implement SECONDARY_RELOAD_CLASS.  */
enum reg_class
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
			   enum machine_mode mode,
			   rtx x)
{
  if ((mode == QImode && (!TARGET_BYTE_ACCESS)) || mode == HImode)
    {
      if (GET_CODE (x) == MEM
	  || (GET_CODE (x) == REG && true_regnum (x) == -1)
	  || (GET_CODE (x) == SUBREG
	      && (GET_CODE (SUBREG_REG (x)) == MEM
		  || (GET_CODE (SUBREG_REG (x)) == REG
		      && true_regnum (SUBREG_REG (x)) == -1))))
	return GENERAL_REGS;
    }

  return NO_REGS;
}
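
/* Rationale: sub-word values (QImode without byte access, and HImode)
   cannot be moved to or from memory directly, so any such move whose
   operand is a MEM, or a pseudo that ended up in memory, is given a
   GENERAL_REGS scratch through which the access can be synthesized
   with full-word operations.  */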
/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
   macros.  */
rtx
mt_function_value (tree valtype, enum machine_mode mode, tree func_decl ATTRIBUTE_UNUSED)
{
  if (mode == DImode || mode == DFmode)
    return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));

  if (valtype)
    mode = TYPE_MODE (valtype);

  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* Split a move into two smaller pieces.
   MODE indicates the reduced mode.  OPERANDS[0] is the original destination,
   OPERANDS[1] is the original src.  The new destinations are
   OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
   and OPERANDS[5].  */

void
mt_split_words (enum machine_mode nmode,
		enum machine_mode omode,
		rtx *operands)
{
  rtx dl, dh;	/* src/dest pieces.  */
  rtx sl, sh;
  int move_high_first = 0;	/* Assume no overlap.  */

  switch (GET_CODE (operands[0])) /* Dest.  */
    {
    case SUBREG:
    case REG:
      if ((GET_CODE (operands[1]) == REG
	   || GET_CODE (operands[1]) == SUBREG)
	  && true_regnum (operands[0]) <= true_regnum (operands[1]))
	move_high_first = 1;

      if (GET_CODE (operands[0]) == SUBREG)
	{
	  dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
			       SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
	  dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]), SUBREG_BYTE (operands[0]));
	}
      else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
	{
	  int r = REGNO (operands[0]);
	  dh = gen_rtx_REG (nmode, r);
	  dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  dh = gen_rtx_SUBREG (nmode, operands[0], 0);
	  dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
	}
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case POST_INC:
	case POST_DEC:
	  gcc_unreachable ();
	default:
	  dl = operand_subword (operands[0],
				GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
				0, omode);
	  dh = operand_subword (operands[0], 0, 0, omode);
	}
      break;
    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[1]))
    {
    case REG:
      if (! IS_PSEUDO_P (operands[1]))
	{
	  int r = REGNO (operands[1]);

	  sh = gen_rtx_REG (nmode, r);
	  sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  sh = gen_rtx_SUBREG (nmode, operands[1], 0);
	  sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
	}
      break;

    case CONST_DOUBLE:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	split_double (operands[1], & sh, & sl);
      break;

    case CONST_INT:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	{
	  int vl, vh;

	  switch (nmode)
	    {
	    default:
	      gcc_unreachable ();
	    }

	  sl = GEN_INT (vl);
	  sh = GEN_INT (vh);
	}
      break;

    case SUBREG:
      sl = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
      sh = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]));
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[1], 0)))
	{
	case POST_DEC:
	case POST_INC:
	  gcc_unreachable ();
	  break;
	default:
	  sl = operand_subword (operands[1],
				GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
				0, omode);
	  sh = operand_subword (operands[1], 0, 0, omode);

	  /* Check if the DF load is going to clobber the register
	     used for the address, and if so make sure that is going
	     to be the second move.  */
	  if (GET_CODE (dl) == REG
	      && true_regnum (dl)
	      == true_regnum (XEXP (XEXP (sl, 0), 0)))
	    move_high_first = 1;
	}
      break;
    default:
      gcc_unreachable ();
    }

  if (move_high_first)
    {
      operands[2] = dh;
      operands[3] = sh;
      operands[4] = dl;
      operands[5] = sl;
    }
  else
    {
      operands[2] = dl;
      operands[3] = sl;
      operands[4] = dh;
      operands[5] = sh;
    }
  return;
}
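
/* Ordering example: splitting the DImode move r2 = r3 (register pairs
   r2/r3 and r3/r4) must copy the high word first (r2 = r3, then
   r3 = r4).  Copying the low word first would overwrite r3 before it
   is read, which is why move_high_first is set whenever the
   destination register number is <= the source register number.  */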
/* Implement TARGET_MUST_PASS_IN_STACK hook.  */
static bool
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
{
  return (((type) != 0
	   && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	       || TREE_ADDRESSABLE (type))));
}
/* Structures to hold branch information during reorg.  */
typedef struct branch_info
{
  rtx insn;  /* The branch insn.  */

  struct branch_info *next;
} branch_info;

typedef struct label_info
{
  rtx label;  /* The label.  */
  branch_info *branches;  /* Branches to this label.  */
  struct label_info *next;
} label_info;

/* Chain of labels found in the current function, used during reorg.  */
static label_info *mt_labels;

/* If *X is a label, add INSN to the list of branches for that
   label.  */

static int
mt_add_branches (rtx *x, void *insn)
{
  if (GET_CODE (*x) == LABEL_REF)
    {
      branch_info *branch = xmalloc (sizeof (*branch));
      rtx label = XEXP (*x, 0);
      label_info *info;

      for (info = mt_labels; info; info = info->next)
	if (info->label == label)
	  break;

      if (!info)
	{
	  info = xmalloc (sizeof (*info));
	  info->next = mt_labels;
	  mt_labels = info;

	  info->label = label;
	  info->branches = NULL;
	}

      branch->next = info->branches;
      info->branches = branch;
      branch->insn = insn;
    }
  return 0;
}
/* If BRANCH has a filled delay slot, check if INSN is dependent upon
   it.  If so, undo the delay slot fill.  Returns the next insn, if
   we patch out the branch.  Returns the branch insn, if we cannot
   patch out the branch (due to anti-dependency in the delay slot).
   In that case, the caller must insert nops at the branch target.  */

static rtx
mt_check_delay_slot (rtx branch, rtx insn)
{
  rtx slot;
  rtx tmp;
  rtx p;
  rtx jmp;

  gcc_assert (GET_CODE (PATTERN (branch)) == SEQUENCE);
  if (INSN_DELETED_P (branch))
    return NULL_RTX;
  slot = XVECEXP (PATTERN (branch), 0, 1);

  tmp = PATTERN (insn);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (tmp)
    /* Not dependent.  */
    return NULL_RTX;

  /* Undo the delay slot.  */
  jmp = XVECEXP (PATTERN (branch), 0, 0);

  tmp = PATTERN (jmp);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (!tmp)
    /* Anti dependent.  */
    return branch;

  p = PREV_INSN (branch);
  NEXT_INSN (p) = slot;
  PREV_INSN (slot) = p;
  NEXT_INSN (slot) = jmp;
  PREV_INSN (jmp) = slot;
  NEXT_INSN (jmp) = branch;
  PREV_INSN (branch) = jmp;
  XVECEXP (PATTERN (branch), 0, 0) = NULL_RTX;
  XVECEXP (PATTERN (branch), 0, 1) = NULL_RTX;
  delete_insn (branch);
  return jmp;
}
/* Insert nops to satisfy pipeline constraints.  We only deal with ms2
   constraints here.  Earlier CPUs are dealt with by inserting nops with
   final_prescan (but that can lead to inferior code, and is
   impractical with ms2's JAL hazard).

   ms2 dynamic constraints
   1) a load and a following use must be separated by one insn
   2) an insn and a following dependent call must be separated by two insns

   Only arith insns are placed in delay slots, so #1 cannot happen with
   a load in a delay slot.  #2 can happen with an arith insn in the
   delay slot.  */
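
/* Illustration of constraint #2 (the mnemonics are only illustrative;
   the real patterns live in the port's .md file): a call through a
   register set by the immediately preceding insn,

	add r9, r1, r2
	jal r9

   needs two independent insns (or nops) inserted between the two.  The
   walk below counts back over the two dynamically preceding insns from
   each call and emits nops for any dependency it finds.  */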
static void
mt_reorg_hazard (void)
{
  rtx insn, next;

  /* Find all the branches.  */
  for (insn = get_insns ();
       insn;
       insn = NEXT_INSN (insn))
    {
      rtx jmp;

      if (!INSN_P (insn))
	continue;

      jmp = PATTERN (insn);

      if (GET_CODE (jmp) != SEQUENCE)
	/* If it's not got a filled delay slot, then it can't
	   conflict.  */
	continue;

      jmp = XVECEXP (jmp, 0, 0);

      if (recog_memoized (jmp) == CODE_FOR_tablejump)
	for (jmp = XEXP (XEXP (XVECEXP (PATTERN (jmp), 0, 1), 0), 0);
	     !JUMP_TABLE_DATA_P (jmp);
	     jmp = NEXT_INSN (jmp))
	  continue;

      for_each_rtx (&PATTERN (jmp), mt_add_branches, insn);
    }

  /* Now scan for dependencies.  */
  for (insn = get_insns ();
       insn && !INSN_P (insn);
       insn = NEXT_INSN (insn))
    continue;

  for (;
       insn;
       insn = next)
    {
      rtx jmp, tmp;
      enum attr_type attr;

      gcc_assert (INSN_P (insn) && !INSN_DELETED_P (insn));
      for (next = NEXT_INSN (insn);
	   next;
	   next = NEXT_INSN (next))
	{
	  if (!INSN_P (next))
	    continue;
	  if (GET_CODE (PATTERN (next)) != USE)
	    break;
	}

      jmp = insn;
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	jmp = XVECEXP (PATTERN (insn), 0, 0);

      attr = recog_memoized (jmp) >= 0 ? get_attr_type (jmp) : TYPE_UNKNOWN;

      if (next && attr == TYPE_LOAD)
	{
	  /* A load.  See if NEXT is dependent, and if so insert a
	     nop.  */
	  tmp = PATTERN (next);
	  if (GET_CODE (tmp) == SEQUENCE)
	    tmp = PATTERN (XVECEXP (tmp, 0, 0));
	  note_stores (PATTERN (insn), insn_dependent_p_1, &tmp);
	  if (!tmp)
	    emit_insn_after (gen_nop (), insn);
	}

      if (attr == TYPE_CALL)
	{
	  /* A call.  Make sure we're not dependent on either of the
	     previous two dynamic instructions.  */
	  int nops = 0;
	  int count;
	  rtx prev = insn;
	  rtx rescan = NULL_RTX;

	  for (count = 2; count && !nops;)
	    {
	      int type;

	      prev = PREV_INSN (prev);
	      if (!prev)
		{
		  /* If we reach the start of the function, we must
		     presume the caller set the address in the delay
		     slot of the call instruction.  */
		  nops = count;
		  break;
		}

	      if (BARRIER_P (prev))
		break;
	      if (LABEL_P (prev))
		{
		  /* Look at branches to this label.  */
		  label_info *label;
		  branch_info *branch;

		  for (label = mt_labels;
		       label;
		       label = label->next)
		    if (label->label == prev)
		      {
			for (branch = label->branches;
			     branch;
			     branch = branch->next)
			  {
			    tmp = mt_check_delay_slot (branch->insn, jmp);

			    if (tmp == branch->insn)
			      {
				nops = count;
				break;
			      }

			    if (tmp && branch->insn == next)
			      rescan = tmp;
			  }
			break;
		      }
		  continue;
		}

	      if (!INSN_P (prev) || GET_CODE (PATTERN (prev)) == USE)
		continue;

	      if (GET_CODE (PATTERN (prev)) == SEQUENCE)
		{
		  /* Look at the delay slot.  */
		  tmp = mt_check_delay_slot (prev, jmp);
		  if (tmp == prev)
		    nops = count;
		  break;
		}

	      type = (INSN_CODE (prev) >= 0 ? get_attr_type (prev)
		      : TYPE_COMPLEX);
	      if (type == TYPE_CALL || type == TYPE_BRANCH)
		break;

	      if (type == TYPE_LOAD
		  || type == TYPE_ARITH
		  || type == TYPE_COMPLEX)
		{
		  tmp = PATTERN (jmp);
		  note_stores (PATTERN (prev), insn_dependent_p_1, &tmp);
		  if (!tmp)
		    {
		      nops = count;
		      break;
		    }
		}

	      if (INSN_CODE (prev) >= 0)
		count--;
	    }

	  if (rescan)
	    for (next = NEXT_INSN (rescan);
		 next && !INSN_P (next);
		 next = NEXT_INSN (next))
	      continue;
	  while (nops--)
	    emit_insn_before (gen_nop (), insn);
	}
    }

  /* Free the data structures.  */
  while (mt_labels)
    {
      label_info *label = mt_labels;
      branch_info *branch, *next;

      mt_labels = label->next;
      for (branch = label->branches; branch; branch = next)
	{
	  next = branch->next;
	  free (branch);
	}
      free (label);
    }
}
/* Fixup the looping instructions, do delayed branch scheduling, fixup
   scheduling hazards.  */

static void
mt_machine_reorg (void)
{
  if (mt_flag_delayed_branch)
    dbr_schedule (get_insns (), dump_file);

  if (TARGET_MS2)
    {
      /* Force all instructions to be split into their final form.  */
      split_all_insns_noflow ();
      mt_reorg_hazard ();
    }
}
/* Initialize the GCC target structure.  */
const struct attribute_spec mt_attribute_table[];

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mt_attribute_table
#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		mt_struct_value_rtx
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES	hook_bool_tree_true
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE	mt_pass_by_reference
#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK	mt_pass_in_stack
#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES	mt_arg_partial_bytes
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mt_setup_incoming_varargs
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG	mt_machine_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mt.h"