Sync usage with man page.
[netbsd-mini2440.git] / gnu / dist / gcc4 / gcc / config / stormy16 / stormy16.c
blobd1f33c7b7195664ac8e71e8ef94937564a4920b9
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "tree.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "except.h"
44 #include "function.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "tm_p.h"
48 #include "langhooks.h"
49 #include "tree-gimple.h"
51 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
52 static void xstormy16_asm_out_constructor (rtx, int);
53 static void xstormy16_asm_out_destructor (rtx, int);
54 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
55 HOST_WIDE_INT, tree);
57 static void xstormy16_init_builtins (void);
58 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
59 static bool xstormy16_rtx_costs (rtx, int, int, int *);
60 static int xstormy16_address_cost (rtx);
61 static bool xstormy16_return_in_memory (tree, tree);
63 /* Define the information needed to generate branch and scc insns. This is
64 stored from the compare operation. */
65 struct rtx_def * xstormy16_compare_op0;
66 struct rtx_def * xstormy16_compare_op1;
68 /* Compute a (partial) cost for rtx X. Return true if the complete
69 cost has been computed, and false if subexpressions should be
70 scanned. In either case, *TOTAL contains the cost result. */
72 static bool
73 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
74 int *total)
76 switch (code)
78 case CONST_INT:
79 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
80 *total = COSTS_N_INSNS (1) / 2;
81 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
82 *total = COSTS_N_INSNS (1);
83 else
84 *total = COSTS_N_INSNS (2);
85 return true;
87 case CONST_DOUBLE:
88 case CONST:
89 case SYMBOL_REF:
90 case LABEL_REF:
91 *total = COSTS_N_INSNS(2);
92 return true;
94 case MULT:
95 *total = COSTS_N_INSNS (35 + 6);
96 return true;
97 case DIV:
98 *total = COSTS_N_INSNS (51 - 6);
99 return true;
101 default:
102 return false;
106 static int
107 xstormy16_address_cost (rtx x)
109 return (GET_CODE (x) == CONST_INT ? 2
110 : GET_CODE (x) == PLUS ? 7
111 : 5);
114 /* Branches are handled as follows:
116 1. HImode compare-and-branches. The machine supports these
117 natively, so the appropriate pattern is emitted directly.
119 2. SImode EQ and NE. These are emitted as pairs of HImode
120 compare-and-branches.
122 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
123 of a SImode subtract followed by a branch (not a compare-and-branch),
124 like this:
129 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
137 /* Emit a branch of kind CODE to location LOC. */
139 void
140 xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
142 rtx op0 = xstormy16_compare_op0;
143 rtx op1 = xstormy16_compare_op1;
144 rtx condition_rtx, loc_ref, branch, cy_clobber;
145 rtvec vec;
146 enum machine_mode mode;
148 mode = GET_MODE (op0);
149 gcc_assert (mode == HImode || mode == SImode);
151 if (mode == SImode
152 && (code == GT || code == LE || code == GTU || code == LEU))
154 int unsigned_p = (code == GTU || code == LEU);
155 int gt_p = (code == GT || code == GTU);
156 rtx lab = NULL_RTX;
158 if (gt_p)
159 lab = gen_label_rtx ();
160 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
161 /* This should be generated as a comparison against the temporary
162 created by the previous insn, but reload can't handle that. */
163 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
164 if (gt_p)
165 emit_label (lab);
166 return;
168 else if (mode == SImode
169 && (code == NE || code == EQ)
170 && op1 != const0_rtx)
172 rtx lab = NULL_RTX;
173 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
174 int i;
176 if (code == EQ)
177 lab = gen_label_rtx ();
179 for (i = 0; i < num_words - 1; i++)
181 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
182 i * UNITS_PER_WORD);
183 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
184 i * UNITS_PER_WORD);
185 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
187 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
188 i * UNITS_PER_WORD);
189 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
190 i * UNITS_PER_WORD);
191 xstormy16_emit_cbranch (code, loc);
193 if (code == EQ)
194 emit_label (lab);
195 return;
198 /* We can't allow reload to try to generate any reload after a branch,
199 so when some register must match we must make the temporary ourselves. */
200 if (mode != HImode)
202 rtx tmp;
203 tmp = gen_reg_rtx (mode);
204 emit_move_insn (tmp, op0);
205 op0 = tmp;
208 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
209 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
210 branch = gen_rtx_SET (VOIDmode, pc_rtx,
211 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
212 loc_ref, pc_rtx));
214 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
216 if (mode == HImode)
217 vec = gen_rtvec (2, branch, cy_clobber);
218 else if (code == NE || code == EQ)
219 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
220 else
222 rtx sub;
223 #if 0
224 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
225 #else
226 sub = gen_rtx_CLOBBER (SImode, op0);
227 #endif
228 vec = gen_rtvec (3, branch, sub, cy_clobber);
231 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
234 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
235 the arithmetic operation. Most of the work is done by
236 xstormy16_expand_arith. */
238 void
239 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
240 rtx dest, rtx carry)
242 rtx op0 = XEXP (comparison, 0);
243 rtx op1 = XEXP (comparison, 1);
244 rtx seq, last_insn;
245 rtx compare;
247 start_sequence ();
248 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
249 seq = get_insns ();
250 end_sequence ();
252 gcc_assert (INSN_P (seq));
254 last_insn = seq;
255 while (NEXT_INSN (last_insn) != NULL_RTX)
256 last_insn = NEXT_INSN (last_insn);
258 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
259 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
260 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
261 emit_insn (seq);
265 /* Return the string to output a conditional branch to LABEL, which is
266 the operand number of the label.
268 OP is the conditional expression, or NULL for branch-always.
270 REVERSED is nonzero if we should reverse the sense of the comparison.
272 INSN is the insn. */
274 char *
275 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
277 static char string[64];
278 int need_longbranch = (op != NULL_RTX
279 ? get_attr_length (insn) == 8
280 : get_attr_length (insn) == 4);
281 int really_reversed = reversed ^ need_longbranch;
282 const char *ccode;
283 const char *template;
284 const char *operands;
285 enum rtx_code code;
287 if (! op)
289 if (need_longbranch)
290 ccode = "jmpf";
291 else
292 ccode = "br";
293 sprintf (string, "%s %s", ccode, label);
294 return string;
297 code = GET_CODE (op);
299 if (GET_CODE (XEXP (op, 0)) != REG)
301 code = swap_condition (code);
302 operands = "%3,%2";
304 else
305 operands = "%2,%3";
307 /* Work out which way this really branches. */
308 if (really_reversed)
309 code = reverse_condition (code);
311 switch (code)
313 case EQ: ccode = "z"; break;
314 case NE: ccode = "nz"; break;
315 case GE: ccode = "ge"; break;
316 case LT: ccode = "lt"; break;
317 case GT: ccode = "gt"; break;
318 case LE: ccode = "le"; break;
319 case GEU: ccode = "nc"; break;
320 case LTU: ccode = "c"; break;
321 case GTU: ccode = "hi"; break;
322 case LEU: ccode = "ls"; break;
324 default:
325 gcc_unreachable ();
328 if (need_longbranch)
329 template = "b%s %s,.+8 | jmpf %s";
330 else
331 template = "b%s %s,%s";
332 sprintf (string, template, ccode, operands, label);
334 return string;
337 /* Return the string to output a conditional branch to LABEL, which is
338 the operand number of the label, but suitable for the tail of a
339 SImode branch.
341 OP is the conditional expression (OP is never NULL_RTX).
343 REVERSED is nonzero if we should reverse the sense of the comparison.
345 INSN is the insn. */
347 char *
348 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
350 static char string[64];
351 int need_longbranch = get_attr_length (insn) >= 8;
352 int really_reversed = reversed ^ need_longbranch;
353 const char *ccode;
354 const char *template;
355 char prevop[16];
356 enum rtx_code code;
358 code = GET_CODE (op);
360 /* Work out which way this really branches. */
361 if (really_reversed)
362 code = reverse_condition (code);
364 switch (code)
366 case EQ: ccode = "z"; break;
367 case NE: ccode = "nz"; break;
368 case GE: ccode = "ge"; break;
369 case LT: ccode = "lt"; break;
370 case GEU: ccode = "nc"; break;
371 case LTU: ccode = "c"; break;
373 /* The missing codes above should never be generated. */
374 default:
375 gcc_unreachable ();
378 switch (code)
380 case EQ: case NE:
382 int regnum;
384 gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
386 regnum = REGNO (XEXP (op, 0));
387 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
389 break;
391 case GE: case LT: case GEU: case LTU:
392 strcpy (prevop, "sbc %2,%3");
393 break;
395 default:
396 gcc_unreachable ();
399 if (need_longbranch)
400 template = "%s | b%s .+6 | jmpf %s";
401 else
402 template = "%s | b%s %s";
403 sprintf (string, template, prevop, ccode, label);
405 return string;
408 /* Many machines have some registers that cannot be copied directly to or from
409 memory or even from other types of registers. An example is the `MQ'
410 register, which on most machines, can only be copied to or from general
411 registers, but not memory. Some machines allow copying all registers to and
412 from memory, but require a scratch register for stores to some memory
413 locations (e.g., those with symbolic address on the RT, and those with
414 certain symbolic address on the SPARC when compiling PIC). In some cases,
415 both an intermediate and a scratch register are required.
417 You should define these macros to indicate to the reload phase that it may
418 need to allocate at least one register for a reload in addition to the
419 register to contain the data. Specifically, if copying X to a register
420 CLASS in MODE requires an intermediate register, you should define
421 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
422 whose registers can be used as intermediate registers or scratch registers.
424 If copying a register CLASS in MODE to X requires an intermediate or scratch
425 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
426 largest register class required. If the requirements for input and output
427 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
428 instead of defining both macros identically.
430 The values returned by these macros are often `GENERAL_REGS'. Return
431 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
432 to or from a register of CLASS in MODE without requiring a scratch register.
433 Do not define this macro if it would always return `NO_REGS'.
435 If a scratch register is required (either with or without an intermediate
436 register), you should define patterns for `reload_inM' or `reload_outM', as
437 required.. These patterns, which will normally be implemented with a
438 `define_expand', should be similar to the `movM' patterns, except that
439 operand 2 is the scratch register.
441 Define constraints for the reload register and scratch register that contain
442 a single register class. If the original reload register (whose class is
443 CLASS) can meet the constraint given in the pattern, the value returned by
444 these macros is used for the class of the scratch register. Otherwise, two
445 additional reload registers are required. Their classes are obtained from
446 the constraints in the insn pattern.
448 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
449 either be in a hard register or in memory. Use `true_regnum' to find out;
450 it will return -1 if the pseudo is in memory and the hard register number if
451 it is in a register.
453 These macros should not be used in the case where a particular class of
454 registers can only be copied to memory and not to another class of
455 registers. In that case, secondary reload registers are not needed and
456 would not be helpful. Instead, a stack location must be used to perform the
457 copy and the `movM' pattern should use memory as an intermediate storage.
458 This case often occurs between floating-point and general registers. */
460 enum reg_class
461 xstormy16_secondary_reload_class (enum reg_class class,
462 enum machine_mode mode,
463 rtx x)
465 /* This chip has the interesting property that only the first eight
466 registers can be moved to/from memory. */
467 if ((GET_CODE (x) == MEM
468 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
469 && (true_regnum (x) == -1
470 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
471 && ! reg_class_subset_p (class, EIGHT_REGS))
472 return EIGHT_REGS;
474 /* When reloading a PLUS, the carry register will be required
475 unless the inc or dec instructions can be used. */
476 if (xstormy16_carry_plus_operand (x, mode))
477 return CARRY_REGS;
479 return NO_REGS;
482 /* Recognize a PLUS that needs the carry register. */
484 xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
486 return (GET_CODE (x) == PLUS
487 && GET_CODE (XEXP (x, 1)) == CONST_INT
488 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
491 /* Detect and error out on out-of-range constants for movhi. */
493 xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
495 if ((GET_CODE (x) == CONST_INT)
496 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
497 error ("constant halfword load operand out of range");
498 return general_operand (x, mode);
501 /* Detect and error out on out-of-range constants for addhi and subhi. */
503 xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
505 if ((GET_CODE (x) == CONST_INT)
506 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
507 error ("constant arithmetic operand out of range");
508 return nonmemory_operand (x, mode);
511 enum reg_class
512 xstormy16_preferred_reload_class (rtx x, enum reg_class class)
514 if (class == GENERAL_REGS
515 && GET_CODE (x) == MEM)
516 return EIGHT_REGS;
518 return class;
521 /* Predicate for symbols and addresses that reflect special 8-bit
522 addressing. */
524 xstormy16_below100_symbol (rtx x,
525 enum machine_mode mode ATTRIBUTE_UNUSED)
527 if (GET_CODE (x) == CONST)
528 x = XEXP (x, 0);
529 if (GET_CODE (x) == PLUS
530 && GET_CODE (XEXP (x, 1)) == CONST_INT)
531 x = XEXP (x, 0);
533 if (GET_CODE (x) == SYMBOL_REF)
534 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
536 if (GET_CODE (x) == CONST_INT)
538 HOST_WIDE_INT i = INTVAL (x);
539 if ((i >= 0x0000 && i <= 0x00ff)
540 || (i >= 0x7f00 && i <= 0x7fff))
541 return 1;
543 return 0;
546 /* Likewise, but only for non-volatile MEMs, for patterns where the
547 MEM will get split into smaller sized accesses. */
549 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
551 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
552 return 0;
553 return xstormy16_below100_operand (x, mode);
556 /* Expand an 8-bit IOR. This either detects the one case we can
557 actually do, or uses a 16-bit IOR. */
558 void
559 xstormy16_expand_iorqi3 (rtx *operands)
561 rtx in, out, outsub, val;
563 out = operands[0];
564 in = operands[1];
565 val = operands[2];
567 if (xstormy16_onebit_set_operand (val, QImode))
569 if (!xstormy16_below100_or_register (in, QImode))
570 in = copy_to_mode_reg (QImode, in);
571 if (!xstormy16_below100_or_register (out, QImode))
572 out = gen_reg_rtx (QImode);
573 emit_insn (gen_iorqi3_internal (out, in, val));
574 if (out != operands[0])
575 emit_move_insn (operands[0], out);
576 return;
579 if (GET_CODE (in) != REG)
580 in = copy_to_mode_reg (QImode, in);
581 if (GET_CODE (val) != REG
582 && GET_CODE (val) != CONST_INT)
583 val = copy_to_mode_reg (QImode, val);
584 if (GET_CODE (out) != REG)
585 out = gen_reg_rtx (QImode);
587 in = simplify_gen_subreg (HImode, in, QImode, 0);
588 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
589 if (GET_CODE (val) != CONST_INT)
590 val = simplify_gen_subreg (HImode, val, QImode, 0);
592 emit_insn (gen_iorhi3 (outsub, in, val));
594 if (out != operands[0])
595 emit_move_insn (operands[0], out);
598 /* Likewise, for AND. */
599 void
600 xstormy16_expand_andqi3 (rtx *operands)
602 rtx in, out, outsub, val;
604 out = operands[0];
605 in = operands[1];
606 val = operands[2];
608 if (xstormy16_onebit_clr_operand (val, QImode))
610 if (!xstormy16_below100_or_register (in, QImode))
611 in = copy_to_mode_reg (QImode, in);
612 if (!xstormy16_below100_or_register (out, QImode))
613 out = gen_reg_rtx (QImode);
614 emit_insn (gen_andqi3_internal (out, in, val));
615 if (out != operands[0])
616 emit_move_insn (operands[0], out);
617 return;
620 if (GET_CODE (in) != REG)
621 in = copy_to_mode_reg (QImode, in);
622 if (GET_CODE (val) != REG
623 && GET_CODE (val) != CONST_INT)
624 val = copy_to_mode_reg (QImode, val);
625 if (GET_CODE (out) != REG)
626 out = gen_reg_rtx (QImode);
628 in = simplify_gen_subreg (HImode, in, QImode, 0);
629 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
630 if (GET_CODE (val) != CONST_INT)
631 val = simplify_gen_subreg (HImode, val, QImode, 0);
633 emit_insn (gen_andhi3 (outsub, in, val));
635 if (out != operands[0])
636 emit_move_insn (operands[0], out);
/* True if X is a CONST_INT such that X + OFFSET is a valid signed
   12-bit displacement ([-2048, 2047]).  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (GET_CODE (X) == CONST_INT						\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X is a CONST_INT such that X + OFFSET is a valid absolute
   address: within [0, 0x7fff] and in either the low 0x100 bytes or the
   top 0x100 bytes of that range.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			 \
  (GET_CODE (X) == CONST_INT						 \
   && INTVAL (X) + (OFFSET) >= 0					 \
   && INTVAL (X) + (OFFSET) < 0x8000					 \
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
650 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
651 rtx x, int strict)
653 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
654 return 1;
656 if (GET_CODE (x) == PLUS
657 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
658 x = XEXP (x, 0);
660 if ((GET_CODE (x) == PRE_MODIFY
661 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
662 || GET_CODE (x) == POST_INC
663 || GET_CODE (x) == PRE_DEC)
664 x = XEXP (x, 0);
666 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
667 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
668 return 1;
670 if (xstormy16_below100_symbol(x, mode))
671 return 1;
673 return 0;
676 /* Return nonzero if memory address X (an RTX) can have different
677 meanings depending on the machine mode of the memory reference it
678 is used for or if the address is valid for some modes but not
679 others.
681 Autoincrement and autodecrement addresses typically have mode-dependent
682 effects because the amount of the increment or decrement is the size of the
683 operand being addressed. Some machines have other mode-dependent addresses.
684 Many RISC machines have no mode-dependent addresses.
686 You may assume that ADDR is a valid address for the machine.
688 On this chip, this is true if the address is valid with an offset
689 of 0 but not of 6, because in that case it cannot be used as an
690 address for DImode or DFmode, or if the address is a post-increment
691 or pre-decrement address. */
693 xstormy16_mode_dependent_address_p (rtx x)
695 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
696 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
697 return 1;
699 if (GET_CODE (x) == PLUS
700 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
701 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
702 return 1;
704 if (GET_CODE (x) == PLUS)
705 x = XEXP (x, 0);
707 if (GET_CODE (x) == POST_INC
708 || GET_CODE (x) == PRE_DEC)
709 return 1;
711 return 0;
714 /* A C expression that defines the optional machine-dependent constraint
715 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
716 types of operands, usually memory references, for the target machine.
717 Normally this macro will not be defined. If it is required for a particular
718 target machine, it should return 1 if VALUE corresponds to the operand type
719 represented by the constraint letter C. If C is not defined as an extra
720 constraint, the value returned should be 0 regardless of VALUE. */
722 xstormy16_extra_constraint_p (rtx x, int c)
724 switch (c)
726 /* 'Q' is for pushes. */
727 case 'Q':
728 return (GET_CODE (x) == MEM
729 && GET_CODE (XEXP (x, 0)) == POST_INC
730 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
732 /* 'R' is for pops. */
733 case 'R':
734 return (GET_CODE (x) == MEM
735 && GET_CODE (XEXP (x, 0)) == PRE_DEC
736 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
738 /* 'S' is for immediate memory addresses. */
739 case 'S':
740 return (GET_CODE (x) == MEM
741 && GET_CODE (XEXP (x, 0)) == CONST_INT
742 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
744 /* 'T' is for Rx. */
745 case 'T':
746 /* Not implemented yet. */
747 return 0;
749 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
750 for allocating a scratch register for 32-bit shifts. */
751 case 'U':
752 return (GET_CODE (x) == CONST_INT
753 && (INTVAL (x) < 2 || INTVAL (x) > 15));
755 /* 'Z' is for CONST_INT value zero. This is for adding zero to
756 a register in addhi3, which would otherwise require a carry. */
757 case 'Z':
758 return (GET_CODE (x) == CONST_INT
759 && (INTVAL (x) == 0));
761 case 'W':
762 return xstormy16_below100_operand(x, GET_MODE(x));
764 default:
765 return 0;
770 short_memory_operand (rtx x, enum machine_mode mode)
772 if (! memory_operand (x, mode))
773 return 0;
774 return (GET_CODE (XEXP (x, 0)) != PLUS);
777 /* Splitter for the 'move' patterns, for modes not directly implemented
778 by hardware. Emit insns to copy a value of mode MODE from SRC to
779 DEST.
781 This function is only called when reload_completed.
784 void
785 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
787 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
788 int direction, end, i;
789 int src_modifies = 0;
790 int dest_modifies = 0;
791 int src_volatile = 0;
792 int dest_volatile = 0;
793 rtx mem_operand;
794 rtx auto_inc_reg_rtx = NULL_RTX;
796 /* Check initial conditions. */
797 gcc_assert (reload_completed
798 && mode != QImode && mode != HImode
799 && nonimmediate_operand (dest, mode)
800 && general_operand (src, mode));
802 /* This case is not supported below, and shouldn't be generated. */
803 gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
805 /* This case is very very bad after reload, so trap it now. */
806 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
808 /* The general idea is to copy by words, offsetting the source and
809 destination. Normally the least-significant word will be copied
810 first, but for pre-dec operations it's better to copy the
811 most-significant word first. Only one operand can be a pre-dec
812 or post-inc operand.
814 It's also possible that the copy overlaps so that the direction
815 must be reversed. */
816 direction = 1;
818 if (GET_CODE (dest) == MEM)
820 mem_operand = XEXP (dest, 0);
821 dest_modifies = side_effects_p (mem_operand);
822 if (auto_inc_p (mem_operand))
823 auto_inc_reg_rtx = XEXP (mem_operand, 0);
824 dest_volatile = MEM_VOLATILE_P (dest);
825 if (dest_volatile)
827 dest = copy_rtx (dest);
828 MEM_VOLATILE_P (dest) = 0;
831 else if (GET_CODE (src) == MEM)
833 mem_operand = XEXP (src, 0);
834 src_modifies = side_effects_p (mem_operand);
835 if (auto_inc_p (mem_operand))
836 auto_inc_reg_rtx = XEXP (mem_operand, 0);
837 src_volatile = MEM_VOLATILE_P (src);
838 if (src_volatile)
840 src = copy_rtx (src);
841 MEM_VOLATILE_P (src) = 0;
844 else
845 mem_operand = NULL_RTX;
847 if (mem_operand == NULL_RTX)
849 if (GET_CODE (src) == REG
850 && GET_CODE (dest) == REG
851 && reg_overlap_mentioned_p (dest, src)
852 && REGNO (dest) > REGNO (src))
853 direction = -1;
855 else if (GET_CODE (mem_operand) == PRE_DEC
856 || (GET_CODE (mem_operand) == PLUS
857 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
858 direction = -1;
859 else if (GET_CODE (src) == MEM
860 && reg_overlap_mentioned_p (dest, src))
862 int regno;
864 gcc_assert (GET_CODE (dest) == REG);
865 regno = REGNO (dest);
867 gcc_assert (refers_to_regno_p (regno, regno + num_words,
868 mem_operand, 0));
870 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
871 direction = -1;
872 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
873 mem_operand, 0))
874 direction = 1;
875 else
876 /* This means something like
877 (set (reg:DI r0) (mem:DI (reg:HI r1)))
878 which we'd need to support by doing the set of the second word
879 last. */
880 gcc_unreachable ();
883 end = direction < 0 ? -1 : num_words;
884 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
886 rtx w_src, w_dest, insn;
888 if (src_modifies)
889 w_src = gen_rtx_MEM (word_mode, mem_operand);
890 else
891 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
892 if (src_volatile)
893 MEM_VOLATILE_P (w_src) = 1;
894 if (dest_modifies)
895 w_dest = gen_rtx_MEM (word_mode, mem_operand);
896 else
897 w_dest = simplify_gen_subreg (word_mode, dest, mode,
898 i * UNITS_PER_WORD);
899 if (dest_volatile)
900 MEM_VOLATILE_P (w_dest) = 1;
902 /* The simplify_subreg calls must always be able to simplify. */
903 gcc_assert (GET_CODE (w_src) != SUBREG
904 && GET_CODE (w_dest) != SUBREG);
906 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
907 if (auto_inc_reg_rtx)
908 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
909 auto_inc_reg_rtx,
910 REG_NOTES (insn));
914 /* Expander for the 'move' patterns. Emit insns to copy a value of
915 mode MODE from SRC to DEST. */
917 void
918 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
920 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
922 rtx pmv = XEXP (dest, 0);
923 rtx dest_reg = XEXP (pmv, 0);
924 rtx dest_mod = XEXP (pmv, 1);
925 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
926 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
928 dest = gen_rtx_MEM (mode, dest_reg);
929 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
931 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
933 rtx pmv = XEXP (src, 0);
934 rtx src_reg = XEXP (pmv, 0);
935 rtx src_mod = XEXP (pmv, 1);
936 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
937 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
939 src = gen_rtx_MEM (mode, src_reg);
940 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
943 /* There are only limited immediate-to-memory move instructions. */
944 if (! reload_in_progress
945 && ! reload_completed
946 && GET_CODE (dest) == MEM
947 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
948 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
949 && ! xstormy16_below100_operand (dest, mode)
950 && GET_CODE (src) != REG
951 && GET_CODE (src) != SUBREG)
952 src = copy_to_mode_reg (mode, src);
954 /* Don't emit something we would immediately split. */
955 if (reload_completed
956 && mode != HImode && mode != QImode)
958 xstormy16_split_move (mode, dest, src);
959 return;
962 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
966 /* Stack Layout:
968 The stack is laid out as follows:
970 SP->
971 FP-> Local variables
972 Register save area (up to 4 words)
973 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
975 AP-> Return address (two words)
976 9th procedure parameter word
977 10th procedure parameter word
979 last procedure parameter word
981 The frame pointer location is tuned to make it most likely that all
982 parameters and local variables can be accessed using a load-indexed
983 instruction. */
/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Does REGNO need to be saved?  A call-saved register that is live,
   or — in an interrupt function — any live or potentially-clobbered
   call-used register other than the carry.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM])			\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS)			\
       && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
1007 /* Compute the stack layout. */
1008 struct xstormy16_stack_layout
1009 xstormy16_compute_stack_layout (void)
1011 struct xstormy16_stack_layout layout;
1012 int regno;
1013 const int ifun = xstormy16_interrupt_function_p ();
1015 layout.locals_size = get_frame_size ();
1017 layout.register_save_size = 0;
1018 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1019 if (REG_NEEDS_SAVE (regno, ifun))
1020 layout.register_save_size += UNITS_PER_WORD;
1022 if (current_function_stdarg)
1023 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1024 else
1025 layout.stdarg_save_size = 0;
1027 layout.frame_size = (layout.locals_size
1028 + layout.register_save_size
1029 + layout.stdarg_save_size);
1031 if (current_function_args_size <= 2048 && current_function_args_size != -1)
1033 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
1034 + current_function_args_size <= 2048)
1035 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
1036 else
1037 layout.fp_minus_ap = 2048 - current_function_args_size;
1039 else
1040 layout.fp_minus_ap = (layout.stdarg_save_size
1041 + layout.register_save_size
1042 + INCOMING_FRAME_SP_OFFSET);
1043 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
1044 - layout.fp_minus_ap);
1045 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1046 return layout;
1049 /* Determine how all the special registers get eliminated. */
1051 xstormy16_initial_elimination_offset (int from, int to)
1053 struct xstormy16_stack_layout layout;
1054 int result;
1056 layout = xstormy16_compute_stack_layout ();
1058 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1059 result = layout.sp_minus_fp - layout.locals_size;
1060 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1061 result = -layout.locals_size;
1062 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1063 result = -layout.fp_minus_ap;
1064 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1065 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
1066 else
1067 gcc_unreachable ();
1069 return result;
1072 static rtx
1073 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1075 rtx set, clobber, insn;
1077 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1078 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1079 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1080 return insn;
/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */
void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (post_inc (reg sp))) -- a push; the stack grows upward
     on this target, since the push uses POST_INC.  */
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
	 regno++)
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Describe the push to the unwinder as two steps -- a store
	   through SP and an SP adjustment -- since the POST_INC form
	   above is not directly representable in the frame notes.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Same two-step unwinder description as for the stdarg
	   register pushes above.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);

      if (layout.sp_minus_fp)
	emit_addhi3_postreload (hard_frame_pointer_rtx,
				hard_frame_pointer_rtx,
				GEN_INT (-layout.sp_minus_fp));
    }
}
1187 /* Do we need an epilogue at all? */
1189 direct_return (void)
1191 return (reload_completed
1192 && xstormy16_compute_stack_layout ().frame_size == 0);
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */
void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx, insn;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (pre_dec (reg sp))) -- a pop, mirroring the POST_INC
     pushes emitted by the prologue.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* If the frame pointer still points at the bottom of the save
	 area, restoring SP from it is cheaper than an add.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	{
	  insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
					 GEN_INT (- layout.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }

  /* Restore any call-saved registers.  Walk the registers in the
     reverse of the order the prologue pushed them.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;

	insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
	RTX_FRAME_RELATED_P (insn) = 1;
	/* Tell the unwinder that SP moved back down by one word.  */
	dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
			     plus_constant (stack_pointer_rtx,
					    -GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
      }

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (- layout.stdarg_save_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Return.  Interrupt handlers need the interrupt-return pattern.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1261 xstormy16_epilogue_uses (int regno)
1263 if (reload_completed && call_used_regs[regno])
1265 const int ifun = xstormy16_interrupt_function_p ();
1266 return REG_NEEDS_SAVE (regno, ifun);
1268 return 0;
/* Worker for FUNCTION_PROFILER.  Profiling is not implemented for this
   target; issue a "sorry" diagnostic if -p/-pg is used.  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1278 /* Return an updated summarizer variable CUM to advance past an
1279 argument in the argument list. The values MODE, TYPE and NAMED
1280 describe that argument. Once this is done, the variable CUM is
1281 suitable for analyzing the *following* argument with
1282 `FUNCTION_ARG', etc.
1284 This function need not do anything if the argument in question was
1285 passed on the stack. The compiler knows how to track the amount of
1286 stack space used for arguments without any special help. However,
1287 it makes life easier for xstormy16_build_va_list if it does update
1288 the word count. */
1289 CUMULATIVE_ARGS
1290 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1291 tree type, int named ATTRIBUTE_UNUSED)
1293 /* If an argument would otherwise be passed partially in registers,
1294 and partially on the stack, the whole of it is passed on the
1295 stack. */
1296 if (cum < NUM_ARGUMENT_REGISTERS
1297 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1298 cum = NUM_ARGUMENT_REGISTERS;
1300 cum += XSTORMY16_WORD_SIZE (type, mode);
1302 return cum;
1306 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1307 tree type, int named ATTRIBUTE_UNUSED)
1309 if (mode == VOIDmode)
1310 return const0_rtx;
1311 if (targetm.calls.must_pass_in_stack (mode, type)
1312 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1313 return 0;
1314 return gen_rtx_REG (mode, cum + 2);
/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* "base": stack position of arguments not passed in registers.  */
  f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
		    ptr_type_node);
  /* "count": number of argument bytes consumed so far.  */
  f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
		    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  /* Chain the fields: base first, then count.  */
  TYPE_FIELDS (record) = f_1;
  TREE_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */
void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* Locate the two va_list fields built by
     xstormy16_build_builtin_va_list: "base" then "count".  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		 NULL_TREE);

  /* base = incoming argument area (adjusted past the return-address
     slot by INCOMING_FRAME_SP_OFFSET).  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (base), t,
	     build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET));
  t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes of arguments already consumed by named parameters.  */
  t = build (MODIFY_EXPR, TREE_TYPE (count), count,
	     build_int_cst (NULL_TREE,
			    current_function_args_info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_expand_builtin_va_arg (tree valist, tree type, tree *pre_p,
				 tree *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* The two va_list fields: "base" then "count".  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		 NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Arguments are always a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label ();
  lab_fromstack = create_artificial_label ();
  addr = create_tmp_var (ptr_type_node, NULL);

  /* Fast path: the argument was passed in (the saved copies of) the
     argument registers, which live at `base'.  */
  if (!must_stack)
    {
      tree r;

      /* if (count_tmp + size > size_of_reg_args) goto lab_fromstack;  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build (GT_EXPR, boolean_type_node, t, r);
      t = build (COND_EXPR, void_type_node, t,
		 build (GOTO_EXPR, void_type_node, lab_fromstack),
		 NULL);
      gimplify_and_add (t, pre_p);

      /* addr = base + count_tmp;  */
      t = fold_convert (ptr_type_node, count_tmp);
      t = build (PLUS_EXPR, ptr_type_node, base, t);
      t = build (MODIFY_EXPR, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* if (count_tmp < size_of_reg_args)
	   count_tmp = size_of_reg_args;  (skip the register words)  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build (MODIFY_EXPR, void_type_node, count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build (COND_EXPR, void_type_node, t, NULL, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack path: arguments on the stack sit below `base' in this
     layout; compute addr = base - (count_tmp - (regs - SP offset)
     + size).  NOTE(review): the exact offset algebra follows the
     stormy-abi document cited above -- confirm against it.  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		- INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build (PLUS_EXPR, TREE_TYPE (count), t,
	     fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (base), fold (t));
  t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
  t = build (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (t, pre_p);

  t = build (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* count += size;  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build (MODIFY_EXPR, TREE_TYPE (count), count, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
/* Initialize the variable parts of a trampoline.  ADDR is an RTX for
   the address of the trampoline; FNADDR is an RTX for the address of
   the nested function; STATIC_CHAIN is an RTX for the static chain
   value that should be passed to the function when it is called.

   Four HImode words are written at ADDR.  NOTE(review): the encodings
   below (0x3130 | reg for the immediate move, 0x02 for the jump) are
   taken on faith from the original code; confirm against the
   xstormy16 instruction set before relying on them.  */
void
xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
{
  rtx reg_addr = gen_reg_rtx (Pmode);
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr_mem;

  reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);

  emit_move_insn (reg_addr, addr);
  /* Word 0: opcode word loading STATIC_CHAIN_REGNUM with an immediate.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 1: the immediate itself -- the static chain value.  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 2: jump opcode byte (0x02) combined with the low byte of
     the target function's address.  */
  emit_move_insn (reg_fnaddr, fnaddr);
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 3: the remaining bits of the target address, shifted down.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1515 /* Worker function for FUNCTION_VALUE. */
1518 xstormy16_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
1520 enum machine_mode mode;
1521 mode = TYPE_MODE (valtype);
1522 PROMOTE_MODE (mode, 0, valtype);
1523 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */
static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.
     In that case `this' lives in the next argument register.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* this += delta (truncated to 16 bits), then tail-jump to FUNCTION.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */
void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  rtx mem = DECL_RTL (decl);
  rtx symbol;

  /* Below-100 objects (flagged by xstormy16_encode_section_info) are
     emitted by hand into the .bss_below100 section.  */
  if (mem != NULL_RTX
      && GET_CODE (mem) == MEM
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      bss100_section ();

      /* Convert ALIGN (in bits) to a power-of-two byte alignment
	 for .p2align.  */
      while (align > 8)
	{
	  align /= 2;
	  p2align ++;
	}

      name2 = default_strip_name_encoding (name);
      if (global)
	fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
	fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      /* Reserve the space with .space rather than .comm so the object
	 stays in this (uninitialized) section.  */
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  /* Everything else gets the standard .local/.comm treatment.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
1626 /* Mark symbols with the "below100" attribute so that we can use the
1627 special addressing modes for them. */
1629 static void
1630 xstormy16_encode_section_info (tree decl, rtx r, int first)
1632 default_encode_section_info (decl, r, first);
1634 if (TREE_CODE (decl) == VAR_DECL
1635 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1636 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1638 rtx symbol = XEXP (r, 0);
1640 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1641 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1645 /* Output constructors and destructors. Just like
1646 default_named_section_asm_out_* but don't set the sections writable. */
1647 #undef TARGET_ASM_CONSTRUCTOR
1648 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1649 #undef TARGET_ASM_DESTRUCTOR
1650 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1652 static void
1653 xstormy16_asm_out_destructor (rtx symbol, int priority)
1655 const char *section = ".dtors";
1656 char buf[16];
1658 /* ??? This only works reliably with the GNU linker. */
1659 if (priority != DEFAULT_INIT_PRIORITY)
1661 sprintf (buf, ".dtors.%.5u",
1662 /* Invert the numbering so the linker puts us in the proper
1663 order; constructors are run from right to left, and the
1664 linker sorts in increasing order. */
1665 MAX_INIT_PRIORITY - priority);
1666 section = buf;
1669 named_section_flags (section, 0);
1670 assemble_align (POINTER_SIZE);
1671 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1674 static void
1675 xstormy16_asm_out_constructor (rtx symbol, int priority)
1677 const char *section = ".ctors";
1678 char buf[16];
1680 /* ??? This only works reliably with the GNU linker. */
1681 if (priority != DEFAULT_INIT_PRIORITY)
1683 sprintf (buf, ".ctors.%.5u",
1684 /* Invert the numbering so the linker puts us in the proper
1685 order; constructors are run from right to left, and the
1686 linker sorts in increasing order. */
1687 MAX_INIT_PRIORITY - priority);
1688 section = buf;
1691 named_section_flags (section, 0);
1692 assemble_align (POINTER_SIZE);
1693 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Print a memory address as an operand to reference that memory location.  */
void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (GET_CODE (address) == CONST_INT)
    {
      /* Absolute addresses are 16 bits wide.  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  /* Whatever remains must be a plain base register.  */
  gcc_assert (GET_CODE (address) == REG);

  /* Syntax: (--reg), (reg++), (reg,offset), or combinations.  */
  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
1747 /* Print an operand to an assembler instruction. */
1748 void
1749 xstormy16_print_operand (FILE *file, rtx x, int code)
1751 switch (code)
1753 case 'B':
1754 /* There is either one bit set, or one bit clear, in X.
1755 Print it preceded by '#'. */
1757 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1758 HOST_WIDE_INT xx = 1;
1759 HOST_WIDE_INT l;
1761 if (GET_CODE (x) == CONST_INT)
1762 xx = INTVAL (x);
1763 else
1764 output_operand_lossage ("'B' operand is not constant");
1766 /* GCC sign-extends masks with the MSB set, so we have to
1767 detect all the cases that differ only in sign extension
1768 beyond the bits we care about. Normally, the predicates
1769 and constraints ensure that we have the right values. This
1770 works correctly for valid masks. */
1771 if (bits_set[xx & 7] <= 1)
1773 /* Remove sign extension bits. */
1774 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1775 xx &= 0xff;
1776 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1777 xx &= 0xffff;
1778 l = exact_log2 (xx);
1780 else
1782 /* Add sign extension bits. */
1783 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1784 xx |= ~(HOST_WIDE_INT)0xff;
1785 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1786 xx |= ~(HOST_WIDE_INT)0xffff;
1787 l = exact_log2 (~xx);
1790 if (l == -1)
1791 output_operand_lossage ("'B' operand has multiple bits set");
1793 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1794 return;
1797 case 'C':
1798 /* Print the symbol without a surrounding @fptr(). */
1799 if (GET_CODE (x) == SYMBOL_REF)
1800 assemble_name (file, XSTR (x, 0));
1801 else if (GET_CODE (x) == LABEL_REF)
1802 output_asm_label (x);
1803 else
1804 xstormy16_print_operand_address (file, x);
1805 return;
1807 case 'o':
1808 case 'O':
1809 /* Print the immediate operand less one, preceded by '#'.
1810 For 'O', negate it first. */
1812 HOST_WIDE_INT xx = 0;
1814 if (GET_CODE (x) == CONST_INT)
1815 xx = INTVAL (x);
1816 else
1817 output_operand_lossage ("'o' operand is not constant");
1819 if (code == 'O')
1820 xx = -xx;
1822 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1823 return;
1826 case 'b':
1827 /* Print the shift mask for bp/bn. */
1829 HOST_WIDE_INT xx = 1;
1830 HOST_WIDE_INT l;
1832 if (GET_CODE (x) == CONST_INT)
1833 xx = INTVAL (x);
1834 else
1835 output_operand_lossage ("'B' operand is not constant");
1837 l = 7 - xx;
1839 fputs (IMMEDIATE_PREFIX, file);
1840 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1841 return;
1844 case 0:
1845 /* Handled below. */
1846 break;
1848 default:
1849 output_operand_lossage ("xstormy16_print_operand: unknown code");
1850 return;
1853 switch (GET_CODE (x))
1855 case REG:
1856 fputs (reg_names [REGNO (x)], file);
1857 break;
1859 case MEM:
1860 xstormy16_print_operand_address (file, XEXP (x, 0));
1861 break;
1863 default:
1864 /* Some kind of constant or label; an immediate operand,
1865 so prefix it with '#' for the assembler. */
1866 fputs (IMMEDIATE_PREFIX, file);
1867 output_addr_const (file, x);
1868 break;
1871 return;
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND+RANGE-1.  */
void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Bias the index down by the lower bound, then take the out-of-range
     exit with a single unsigned comparison.  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  int_index = gen_lowpart_common (HImode, index);
  /* Scale by 4 -- presumably the size of each jmpf entry emitted by
     xstormy16_output_addr_vec; confirm against the ISA.  */
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1907 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1908 instructions, without label or alignment or any other special
1909 constructs. We know that the previous instruction will be the
1910 `tablejump_pcrel' output above.
1912 TODO: it might be nice to output 'br' instructions if they could
1913 all reach. */
1915 void
1916 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1918 int vlen, idx;
1920 current_function_section (current_function_decl);
1922 vlen = XVECLEN (table, 0);
1923 for (idx = 0; idx < vlen; idx++)
1925 fputs ("\tjmpf ", file);
1926 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1927 fputc ('\n', file);
/* Expander for the `call' patterns.
   (The original comment here was a copy-paste of the casesi comment
   and described the wrong parameters.)
   RETVAL is the RTL for the register receiving the return value, or
     NULL for a void call.
   DEST is the MEM naming the function to call.
   COUNTER is passed through as the second operand of the CALL rtx.  */
void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  enum machine_mode mode;

  gcc_assert (GET_CODE (dest) == MEM);
  dest = XEXP (dest, 0);

  /* The call address must be a constant or a register.  */
  if (! CONSTANT_P (dest)
      && GET_CODE (dest) != REG)
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* For an indirect call, attach a USE of a fresh zeroed register.
     NOTE(review): presumably this matches an operand of the call
     patterns in stormy16.md -- confirm there.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   using CARRY as a temporary.  When CODE is COMPARE, a branch
   template is generated (this saves duplicating code in
   xstormy16_split_cbranch).  */
void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1, rtx carry)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* NEG is computed as 0 - SRC1, so clear SRC0 and fall through to
     the MINUS handling below.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Process one word at a time, from least to most significant,
     propagating through CARRY.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding constant zero in the first word is a no-op.  */
	  if (firstloop
	      && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;

	  /* First word sets the carry, later words consume and set it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* The last word of a COMPARE is emitted as a combined
	     subtract-with-borrow plus conditional branch template.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, carry));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, carry);
	      /* Placeholder branch; the real condition/labels are
		 filled in by the cbranch splitter.  */
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: IOR/XOR with 0, AND with -1.  */
	  if (GET_CODE (w_src1) == CONST_INT
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2083 /* The shift operations are split at output time for constant values;
2084 variable-width shifts get handed off to a library routine.
2086 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2087 SIZE_R will be a CONST_INT, X will be a hard register. */
2089 const char *
2090 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2091 rtx x, rtx size_r, rtx temp)
2093 HOST_WIDE_INT size;
2094 const char *r0, *r1, *rt;
2095 static char r[64];
2097 gcc_assert (GET_CODE (size_r) == CONST_INT
2098 && GET_CODE (x) == REG && mode == SImode);
2099 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2101 if (size == 0)
2102 return "";
2104 r0 = reg_names [REGNO (x)];
2105 r1 = reg_names [REGNO (x) + 1];
2107 /* For shifts of size 1, we can use the rotate instructions. */
2108 if (size == 1)
2110 switch (code)
2112 case ASHIFT:
2113 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2114 break;
2115 case ASHIFTRT:
2116 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2117 break;
2118 case LSHIFTRT:
2119 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2120 break;
2121 default:
2122 gcc_unreachable ();
2124 return r;
2127 /* For large shifts, there are easy special cases. */
2128 if (size == 16)
2130 switch (code)
2132 case ASHIFT:
2133 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2134 break;
2135 case ASHIFTRT:
2136 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2137 break;
2138 case LSHIFTRT:
2139 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2140 break;
2141 default:
2142 gcc_unreachable ();
2144 return r;
2146 if (size > 16)
2148 switch (code)
2150 case ASHIFT:
2151 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2152 r1, r0, r0, r1, (int) size - 16);
2153 break;
2154 case ASHIFTRT:
2155 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2156 r0, r1, r1, r0, (int) size - 16);
2157 break;
2158 case LSHIFTRT:
2159 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2160 r0, r1, r1, r0, (int) size - 16);
2161 break;
2162 default:
2163 gcc_unreachable ();
2165 return r;
2168 /* For the rest, we have to do more work. In particular, we
2169 need a temporary. */
2170 rt = reg_names [REGNO (temp)];
2171 switch (code)
2173 case ASHIFT:
2174 sprintf (r,
2175 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2176 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
2177 r1, rt);
2178 break;
2179 case ASHIFTRT:
2180 sprintf (r,
2181 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2182 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2183 r0, rt);
2184 break;
2185 case LSHIFTRT:
2186 sprintf (r,
2187 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2188 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2189 r0, rt);
2190 break;
2191 default:
2192 gcc_unreachable ();
2194 return r;
2197 /* Attribute handling. */
2199 /* Return nonzero if the function is an interrupt function. */
2201 xstormy16_interrupt_function_p (void)
2203 tree attributes;
2205 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2206 any functions are declared, which is demonstrably wrong, but
2207 it is worked around here. FIXME. */
2208 if (!cfun)
2209 return 0;
2211 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2212 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

/* Handlers for the machine-specific attributes, defined below.  */
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Table of machine attributes.  "interrupt" applies only to function
   types; "BELOW100"/"below100" (both spellings accepted) mark data to
   be placed in the first 256 bytes of memory.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
  { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2231 /* Handle an "interrupt" attribute;
2232 arguments as in struct attribute_spec.handler. */
2233 static tree
2234 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2235 tree args ATTRIBUTE_UNUSED,
2236 int flags ATTRIBUTE_UNUSED,
2237 bool *no_add_attrs)
2239 if (TREE_CODE (*node) != FUNCTION_TYPE)
2241 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2242 IDENTIFIER_POINTER (name));
2243 *no_add_attrs = true;
2246 return NULL_TREE;
2249 /* Handle an "below" attribute;
2250 arguments as in struct attribute_spec.handler. */
2251 static tree
2252 xstormy16_handle_below100_attribute (tree *node,
2253 tree name ATTRIBUTE_UNUSED,
2254 tree args ATTRIBUTE_UNUSED,
2255 int flags ATTRIBUTE_UNUSED,
2256 bool *no_add_attrs)
2258 if (TREE_CODE (*node) != VAR_DECL
2259 && TREE_CODE (*node) != POINTER_TYPE
2260 && TREE_CODE (*node) != TYPE_DECL)
2262 warning (OPT_Wattributes,
2263 "%<__BELOW100__%> attribute only applies to variables");
2264 *no_add_attrs = true;
2266 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2268 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2270 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2271 "with auto storage class");
2272 *no_add_attrs = true;
2276 return NULL_TREE;
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Description table for the machine-specific builtins: the C-level
   name, the insn code of the MD pattern that implements it, and two
   strings describing how the pattern's operands map to arguments and
   how they are typed.  Terminated by an all-zero sentinel entry.  */
static struct {
  const char *name;
  int md_code;
  const char *arg_ops; /* 0..9, t for temp register, r for return value */
  const char *arg_types; /* s=short,l=long, upper case for unsigned */
} s16builtins[] = {
  /* Divide and modulus share one pattern; they differ only in which
     of the pattern's two outputs ('r' vs 't') is the return value.  */
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { 0, 0, 0, 0 }
};
2297 static void
2298 xstormy16_init_builtins (void)
2300 tree args, ret_type, arg;
2301 int i, a;
2303 ret_type = void_type_node;
2305 for (i=0; s16builtins[i].name; i++)
2307 args = void_list_node;
2308 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2310 switch (s16builtins[i].arg_types[a])
2312 case 's': arg = short_integer_type_node; break;
2313 case 'S': arg = short_unsigned_type_node; break;
2314 case 'l': arg = long_integer_type_node; break;
2315 case 'L': arg = long_unsigned_type_node; break;
2316 default: gcc_unreachable ();
2318 if (a == 0)
2319 ret_type = arg;
2320 else
2321 args = tree_cons (NULL_TREE, arg, args);
2323 lang_hooks.builtin_function (s16builtins[i].name,
2324 build_function_type (ret_type, args),
2325 i, BUILT_IN_MD, NULL, NULL);
/* Expand a call to one of the builtins in s16builtins: map the call's
   arguments onto the operands of the corresponding MD pattern (as
   directed by arg_ops), emit the insn, and return the RTX holding the
   result.  TARGET is a hint for where to put the result; SUBTARGET,
   MODE and IGNORE are unused.  */
static rtx
xstormy16_expand_builtin(tree exp, rtx target,
			 rtx subtarget ATTRIBUTE_UNUSED,
			 enum machine_mode mode ATTRIBUTE_UNUSED,
			 int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  /* DECL_FUNCTION_CODE is the index into s16builtins assigned at
     registration time in xstormy16_init_builtins.  */
  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand up to ten actual arguments into RTL.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
      argtree = TREE_CHAIN (argtree);
    }

  /* Build the operand vector: 'r' is the return value, 't' a scratch
     register, and a digit selects the corresponding argument.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      int omode;

      copyto[o] = 0;

      omode = insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the chosen RTX doesn't satisfy the operand's predicate,
	 substitute a fresh register.  For output operands ('+'/'='
	 constraints) remember the original so the result can be
	 copied back after the insn is emitted.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* The pattern takes at most ten operands; unused slots pass
     whatever is in op[] but are ignored by GEN_FCN.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy substituted output operands back to their original homes,
     updating RETVAL if the return value was among them.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.  */
static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  /* Only the conditional-branch patterns are candidates.  */
  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch:
     (set (pc) (if_then_else (cond ...) ...)).  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      /* EQ/NE test a bit isolated by a preceding AND.  */
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* LT/GE against zero test the sign bit, which is set up by a
	 preceding sign extension.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The comparison must be of a register against zero, and that
     register must die at the branch so we are free to retarget the
     test at a memory operand instead.  */
  reg = XEXP (cond, 0);
  if (GET_CODE (reg) != REG)
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  int and_code = recog_memoized (and);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
	    {
	      /* This is for testing bit 15.  The plain HImode move is
		 stood in for by the branch insn itself.  */
	      and = insn;
	      break;
	    }

	  /* Any intervening use of the register kills the
	     optimization.  */
	  if (reg_mentioned_p (reg, and))
	    return;

	  /* Only scan past notes and simple insns; give up at jumps,
	     calls, labels, etc.  */
	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  if (recog_memoized (and) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
	    break;

	  /* As above: bail on intervening uses or anything other than
	     a note or plain insn.  */
	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}

      if (and)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	       (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			  (clobber (reg:BI carry))]

	       (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      /* No matching shift found before an interfering insn:
		 carry on without one (SHIFT is optional).  */
	      if (reg_mentioned_p (reg, shift)
		  || (GET_CODE (shift) != NOTE
		      && GET_CODE (shift) != INSN))
		{
		  shift = NULL_RTX;
		  break;
		}
	    }
	}
    }
  if (!and)
    return;

  /* Now look further back for the load that put the value in REG.
     Only below-100 memory operands can use the BN/BP opcodes.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  /* A QImode load zero-extended into an HImode AND.  */
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (GET_CODE (load) != NOTE
	  && GET_CODE (load) != INSN)
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* Sign-bit test: the mask is the top bit of the loaded width.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND must isolate exactly one bit for a BN/BP test.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));

      /* Account for the bits discarded by a preceding right shift.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow an HImode access to the single byte containing the tested
     bit; bump the address if it is in the high byte.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test the memory operand directly,
     then delete the now-redundant feeder insns.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  /* Force re-recognition of the modified branch.  */
  INSN_CODE (insn) = -1;
  delete_insn (load);

  /* AND == INSN marks the bit-15 case above, where there is no
     separate insn to delete.  */
  if (and != insn)
    delete_insn (and);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2612 static void
2613 xstormy16_reorg (void)
2615 rtx insn;
2617 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2619 if (! JUMP_P (insn))
2620 continue;
2621 combine_bnp (insn);
2626 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2628 static bool
2629 xstormy16_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2631 HOST_WIDE_INT size = int_size_in_bytes (type);
2632 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook initialization.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_expand_builtin_va_arg

/* Promote sub-word arguments, return values and prototype conversions
   to full words.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* The target vector, built from the hook macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;