1 ;; ARM Thumb-1 Machine Description
2 ;; Copyright (C) 2007-2025 Free Software Foundation, Inc.
4 ;; This file is part of GCC.
6 ;; GCC is free software; you can redistribute it and/or modify it
7 ;; under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
11 ;; GCC is distributed in the hope that it will be useful, but
12 ;; WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 ;; General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>. */
21 ;;---------------------------------------------------------------------------
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instruction such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
;; these instructions.  Currently, this problem cannot happen because
;; there are no separate Thumb1 patterns for individual instructions
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
;; 64-bit add.  The low words are added with ADDS (which sets the carry
;; flag) and the high words with ADCS (which consumes it), so the pair
;; must stay a single insn -- see the flag-hazard note at the top of
;; this file.  CC is explicitly clobbered.
(define_insn "*thumb1_adddi3"
  [(set (match_operand:DI 0 "register_operand" "=l")
	(plus:DI (match_operand:DI 1 "register_operand" "%0")
		 (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))
  "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic additions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.cc.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; 32-bit add with ten alternatives (register/immediate forms, SP-relative
;; forms, and wide immediates Pa/Pb/Pc).  Negative immediates in the
;; register alternatives are rewritten to SUB with the negated constant.
;; After reload, immediates outside [-255, 255] (or > 1020 for SP-relative
;; adds) are split into two chained adds via operands[2]/operands[3].
(define_insn_and_split "*thumb1_addsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
  static const char * const asms[] =
  \"adds\\t%0, %0, %2\",
  \"subs\\t%0, %0, #%n2\",
  \"adds\\t%0, %1, %2\",
  if ((which_alternative == 2 || which_alternative == 6)
      && CONST_INT_P (operands[2])
      && INTVAL (operands[2]) < 0)
    return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
  return asms[which_alternative];
  "&& reload_completed && CONST_INT_P (operands[2])
   && ((operands[1] != stack_pointer_rtx
	&& (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
       || (operands[1] == stack_pointer_rtx
	   && INTVAL (operands[2]) > 1020))"
  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
  HOST_WIDE_INT offset = INTVAL (operands[2]);
  if (operands[1] == stack_pointer_rtx)
  else if (offset < -255)
  operands[3] = GEN_INT (offset);
  operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
		     alus_sreg,alus_sreg,multiple,multiple,multiple")]
;; Reloading and elimination of the frame pointer can
;; sometimes cause this optimization to be missed.
;;
;; Peephole: "mov rd, #imm" followed by "add rd, rd, sp" is collapsed
;; into a single "add rd, sp, #imm" when the constant is word-aligned
;; and below 1024 (the ADD Rd, SP, #imm encoding range).
  [(set (match_operand:SI 0 "low_register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))
	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
  && UINTVAL (operands[1]) < 1024
  && (UINTVAL (operands[1]) & 3) == 0"
  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
;; 64-bit subtract.  SUBS sets the carry (borrow) flag and SBCS consumes
;; it, so the two instructions must be emitted as one insn -- see the
;; flag-hazard note at the top of this file.  CC is explicitly clobbered.
(define_insn "*thumb_subdi3"
  [(set (match_operand:DI 0 "register_operand" "=l")
	(minus:DI (match_operand:DI 1 "register_operand" "0")
		  (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic subtractions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.cc.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; 32-bit subtract; operand 2 accepts a register or a Pd immediate.
;; Marked "conds set": the output instruction sets the flags.
(define_insn "thumb1_subsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(minus:SI (match_operand:SI 1 "register_operand" "l")
		  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
;; 1 and 2 are the same, because reload will make operand 0 match
;; operand 1 without realizing that this conflicts with operand 2.  We fix
;; this by adding another alternative to match this case, and then `reload'
;; it ourselves.  This alternative must come first.
;;
;; Pre-v6 multiply: MULS requires the destination to equal one source
;; register, so the first two alternatives copy operand 1 into the
;; (earlyclobber) destination before multiplying.
(define_insn "*thumb_mulsi3"
  [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
		 (match_operand:SI 2 "register_operand" "l,l,l")))]
  "TARGET_THUMB1 && !arm_arch6"
  movs\\t%0, %1\;muls\\t%0, %2
  mov\\t%0, %1\;muls\\t%0, %2
  [(set_attr "length" "4,4,2")
   (set_attr "type" "muls")]

;; v6 and later multiply: the destination may tie either source, so a
;; single 16-bit MULS always suffices.
(define_insn "*thumb_mulsi3_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
		 (match_operand:SI 2 "register_operand" "l,0,0")))]
  "TARGET_THUMB1 && arm_arch6"
  [(set_attr "length" "2")
   (set_attr "type" "muls")]
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
;; in arm_split_atomic_op in arm.cc.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; Bitwise AND of two low registers; destination ties operand 1.
(define_insn "*thumb1_andsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(and:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  [(set_attr "length" "2")
   (set_attr "type" "logic_imm")
   (set_attr "conds" "set")])
;; Split zero_extract into a left shift (to discard high bits) followed
;; by a logical right shift; the shift counts are recomputed from the
;; bit-field position and width below.  Uses a scratch (operand 4).
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
  HOST_WIDE_INT temp = INTVAL (operands[2]);
  operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
  operands[3] = GEN_INT (32 - temp);

;; Same transformation for sign_extract, using an arithmetic right shift
;; to propagate the sign bit; no scratch is needed since the destination
;; can be reused between the two shifts.
  [(set (match_operand:SI 0 "s_register_operand" "")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))]
  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
  HOST_WIDE_INT temp = INTVAL (operands[2]);
  operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
  operands[3] = GEN_INT (32 - temp);
;; Bit-clear (BIC): operand 2 AND NOT operand 1; the destination must
;; tie operand 2, matching the BICS encoding.
(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
		(match_operand:SI 2 "register_operand" "0")))]
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.cc.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; Bitwise inclusive OR of two low registers; destination ties operand 1.
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(ior:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")])

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.cc.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; Bitwise exclusive OR of two low registers; destination ties operand 1.
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(xor:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
;; Left shift: immediate-count (N) or register-count alternatives; the
;; register-count form requires the destination to tie operand 1.
(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

;; Arithmetic right shift; same alternative scheme as the left shift.
(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

;; Logical right shift; same alternative scheme as the left shift.
(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

;; Rotate right: register count only; destination ties operand 1.
(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "register_operand" "l")))]
  [(set_attr "type" "shift_reg")
   (set_attr "length" "2")]
;; 64-bit negate: zero the high word, RSBS the low word (sets borrow),
;; then SBCS for the high word.  The three instructions must stay one
;; insn because SBCS consumes the flags set by RSBS; hence the CC
;; clobber and the earlyclobber on the destination.
(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI 0 "register_operand" "=&l")
	(neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]

;; 32-bit negate (RSBS with zero immediate).
(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm")]
;; Absolute value, branch-free: after reload this splits into
;;   scratch = x >> 31   (arithmetic; 0 or -1)
;;   dest    = x + scratch
;;   dest    = dest ^ scratch
(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]

;; Negated absolute value, same idea but with the subtraction reversed:
;;   scratch = x >> 31; dest = scratch - x; dest ^= scratch.
(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
;; Bitwise NOT (MVN).
(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(not:SI (match_operand:SI 1 "register_operand" "l")))]
  [(set_attr "length" "2")
   (set_attr "type" "mvn_reg")]
;; HI -> SI zero-extend.  The register alternative uses UXTH on arch6;
;; the memory alternative loads with LDRH.  The C fragment rewrites an
;; SP-based address into operand 0 first, because LDRH cannot address
;; relative to SP (the comment below notes reload can produce this).
(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
  mem = XEXP (operands[1], 0);
  if (GET_CODE (mem) == CONST)
  if (GET_CODE (mem) == PLUS)
    rtx a = XEXP (mem, 0);
    /* This can happen due to bugs in reload.  */
    if (REG_P (a) && REGNO (a) == SP_REGNUM)
  ops[0] = operands[0];
  output_asm_insn ("mov\t%0, %1", ops);
  XEXP (mem, 0) = operands[0];
  return "ldrh\t%0, %1";
  [(set_attr_alternative "length"
      [(if_then_else (eq_attr "is_arch6" "yes")
		     (const_int 2) (const_int 4))
   (set_attr "type" "extend,load_byte")]

;; QI -> SI zero-extend, pre-v6 (no UXTB available).
(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift_reg,load_byte")
   (set_attr "pool_range" "*,32")]

;; QI -> SI zero-extend for v6 and later (UXTB / LDRB).
(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  [(set_attr "length" "2")
   (set_attr "type" "extend,load_byte")]
;; We used to have an early-clobber on the scratch register here.
;; However, there's a bug somewhere in reload which means that this
;; can be partially ignored during spill allocation if the memory
;; address also needs reloading; this causes us to die later on when
;; we try to verify the operands.  Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address.
;;
;; HI -> SI sign-extend.  Register alternative uses SXTH on arch6.  For
;; memory, label-based (constant-pool) addresses load a full word with
;; LDR; otherwise LDRSH is used, with the scratch register (operand 2)
;; employed to materialise an immediate offset, since LDRSH only takes
;; a register offset.
(define_insn "thumb1_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
   (clobber (match_scratch:SI 2 "=X,l"))]
  if (which_alternative == 0 && !arm_arch6)
  if (which_alternative == 0)
    return \"sxth\\t%0, %1\";
  mem = XEXP (operands[1], 0);
  /* This code used to try to use 'V', and fix the address only if it was
     offsettable, but this fails for e.g. REG+48 because 48 is outside the
     range of QImode offsets, and offsettable_address_p does a QImode
  if (GET_CODE (mem) == CONST)
  if (GET_CODE (mem) == LABEL_REF)
    return \"ldr\\t%0, %1\";
  if (GET_CODE (mem) == PLUS)
    rtx a = XEXP (mem, 0);
    rtx b = XEXP (mem, 1);
    if (GET_CODE (a) == LABEL_REF
      return \"ldr\\t%0, %1\";
  return \"ldrsh\\t%0, %1\";
  gcc_assert (REG_P (ops[1]));
  ops[0] = operands[0];
  if (reg_mentioned_p (operands[2], ops[1]))
  ops[3] = operands[2];
  output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
  [(set_attr_alternative "length"
      [(if_then_else (eq_attr "is_arch6" "yes")
		     (const_int 2) (const_int 4))
   (set_attr "type" "extend,load_byte")
   (set_attr "pool_range" "*,1018")]
;; Split a QI -> SI sign-extending load whose address LDRSB cannot
;; encode (LDRSB takes only reg+reg addresses).  The address is rebuilt
;; as reg+reg using operand 0 as the extra register; if the destination
;; overlaps the address, the value is first moved byte-wise through a
;; lowpart of operand 0 and re-extended.
  [(set (match_operand:SI 0 "register_operand" "")
	(sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
  rtx addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    /* No split necessary.  */
  if (GET_CODE (addr) == PLUS
      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
  if (reg_overlap_mentioned_p (operands[0], addr))
    rtx t = gen_lowpart (QImode, operands[0]);
    emit_move_insn (t, operands[1]);
    emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
  addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
  operands[2] = const0_rtx;
  else if (GET_CODE (addr) != PLUS)
  else if (REG_P (XEXP (addr, 0)))
    operands[2] = XEXP (addr, 1);
    addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
  operands[2] = XEXP (addr, 0);
  addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
  operands[3] = change_address (operands[1], QImode, addr);

;; Peephole: fold "r0 += imm; r2 = 0; r3 = sign_extend(mem[r0 + r2])"
;; into "r2 = imm; r3 = sign_extend(mem[r0 + r2])", provided r0/r2 are
;; dead afterwards (or are the destination itself).
  [(set (match_operand:SI 0 "register_operand" "")
	(plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
   (set (match_operand:SI 3 "register_operand" "")
	(sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
  && GET_CODE (XEXP (operands[4], 0)) == PLUS
  && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
  && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
  && (peep2_reg_dead_p (3, operands[0])
      || rtx_equal_p (operands[0], operands[3]))
  && (peep2_reg_dead_p (3, operands[2])
      || rtx_equal_p (operands[2], operands[3]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
  operands[4] = change_address (operands[4], QImode, addr);
;; QI -> SI sign-extend.  Register alternative uses SXTB on arch6; the
;; memory alternatives use LDRSB directly when the address is already
;; reg+reg (constraint 'V' or the check below), with a longer sequence
;; otherwise (lengths in the set_attr_alternative).
(define_insn "thumb1_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
  if (which_alternative == 0 && arm_arch6)
    return "sxtb\\t%0, %1";
  if (which_alternative == 0)
  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    return "ldrsb\\t%0, %1";
  [(set_attr_alternative "length"
      [(if_then_else (eq_attr "is_arch6" "yes")
		     (const_int 2) (const_int 4))
       (if_then_else (eq_attr "is_arch6" "yes")
		     (const_int 4) (const_int 6))])
   (set_attr "type" "extend,load_byte,load_byte")]
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
;;
;; 64-bit move.  Register-register copies pick the word order that
;; avoids clobbering an overlapping source half before it is read;
;; other alternatives cover small immediates (I/J), MOVW (j, v8-M
;; Baseline only), ldmia/stmia, literal loads and word-pair stores.
(define_insn "*thumb1_movdi_insn"
  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
	(match_operand:DI 1 "general_operand" "l, I,J,j,>,l,mi,l,*r"))]
  && ( register_operand (operands[0], DImode)
      || register_operand (operands[1], DImode))"
  switch (which_alternative)
    if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
      return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
    return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
    return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
    operands[1] = GEN_INT (- INTVAL (operands[1]));
    return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
    gcc_assert (TARGET_HAVE_MOVT);
    /* %R0 prints the high word of the DI register; a bare "R0" would be
       emitted as literal text rather than the register name.  */
    return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
    return \"ldmia\\t%1, {%0, %H0}\";
    return \"stmia\\t%0, {%1, %H1}\";
    return thumb_load_double_from_address (operands);
    operands[2] = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, XEXP (operands[0], 0), 4));
    output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
    if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
      return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
    return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
  [(set_attr "length" "4,4,6,6,2,2,6,4,4")
   (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
;; match patterns usable by ldmia/stmia

;; Peephole: a 64-bit load from a low-register base that dies after the
;; insn and does not overlap the destination is rewritten with a
;; post-increment address, enabling ldmia with write-back.
  [(set (match_operand:DIDF 0 "low_register_operand" "")
	(match_operand:DIDF 1 "memory_operand" ""))]
  && low_register_operand (XEXP (operands[1], 0), SImode)
  && !reg_overlap_mentioned_p (XEXP (operands[1], 0), operands[0])
  && peep2_reg_dead_p (1, XEXP (operands[1], 0))"
  operands[1] = change_address (operands[1], VOIDmode,
				gen_rtx_POST_INC (SImode,
						  XEXP (operands[1], 0)));

;; Corresponding store peephole for stmia with write-back.
  [(set (match_operand:DIDF 0 "memory_operand" "")
	(match_operand:DIDF 1 "low_register_operand" ""))]
  && low_register_operand (XEXP (operands[0], 0), SImode)
  && peep2_reg_dead_p (1, XEXP (operands[0], 0))
  /* The low register in the transfer list may overlap the address,
     but the second cannot.  */
  && REGNO (XEXP (operands[0], 0)) != (REGNO (operands[1]) + 1)"
  operands[0] = change_address (operands[0], VOIDmode,
				gen_rtx_POST_INC (SImode,
						  XEXP (operands[0], 0)));
;; 32-bit move.  Alternative 7 ('i' with required_for_purecode) builds a
;; constant byte-by-byte with movs/lsls/adds so no literal pool entry is
;; needed; symbolic addresses use the :upper8_15:/:upper0_7:/:lower8_15:
;; /:lower0_7: relocation operators, and plain integers go through
;; thumb1_gen_const_int_print.
(define_insn "*thumb1_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, l, m,*l*h*k")
	(match_operand:SI 1 "general_operand" "l, I,j,J,K,>,l,i, mi,l,*l*h*k"))]
  && ( register_operand (operands[0], SImode)
      || register_operand (operands[1], SImode))"
  switch (which_alternative)
    case 0: return "movs\t%0, %1";
    case 1: return "movs\t%0, %1";
    case 2: return "movw\t%0, %1";
    case 5: return "ldmia\t%1, {%0}";
    case 6: return "stmia\t%0, {%1}";
      /* pure-code alternative: build the constant byte by byte,
	 instead of loading it from a constant pool.  */
      if (arm_valid_symbolic_address_p (operands[1]))
	  output_asm_insn (\"movs\\t%0, #:upper8_15:%1\", operands);
	  output_asm_insn (\"lsls\\t%0, #8\", operands);
	  output_asm_insn (\"adds\\t%0, #:upper0_7:%1\", operands);
	  output_asm_insn (\"lsls\\t%0, #8\", operands);
	  output_asm_insn (\"adds\\t%0, #:lower8_15:%1\", operands);
	  output_asm_insn (\"lsls\\t%0, #8\", operands);
	  output_asm_insn (\"adds\\t%0, #:lower0_7:%1\", operands);
      else if (GET_CODE (operands[1]) == CONST_INT)
	  thumb1_gen_const_int_print (operands[0], INTVAL (operands[1]));
    case 8: return "ldr\t%0, %1";
    case 9: return "str\t%1, %0";
    case 10: return "mov\t%0, %1";
  [(set_attr "length" "2,2,4,4,4,2,2,14,2,2,2")
   (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,alu_sreg,load_4,store_4,mov_reg")
   (set_attr "pool_range" "*,*,*,*,*,*,*, *,1018,*,*")
   (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1,t1")
   (set_attr "required_for_purecode" "no,no,no,no,no,no,no,yes,no,no,no")
   (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,clob,nocond,nocond,nocond")])
; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
; to see if we can load them in fewer instructions or fewer cycles.
; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
; thumb1_movdi_insn has a better way to handle them.
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
	(match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);

;; Split a negative constant in the J range into "mov #(-imm)" followed
;; by a negate.
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (neg:SI (match_dup 2)))]
  operands[1] = GEN_INT (- INTVAL (operands[1]));
  operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];

;; Split a K-range constant (an 8-bit value shifted left) into a small
;; mov followed by a left shift; the loop searches for the smallest
;; shift that exposes an 8-bit payload.
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
  unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
  unsigned HOST_WIDE_INT mask = 0xff;
  for (i = 0; i < 25; i++)
    if ((val & (mask << i)) == val)
  /* Don't split if the shift is zero.  */
  operands[1] = GEN_INT (val >> i);
  operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  operands[3] = GEN_INT (i);

;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
  operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
  operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  operands[3] = GEN_INT (255);

;; When the literal pool is disabled, synthesise remaining constants
;; with an instruction sequence via thumb1_gen_const_int_rtl.
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  && arm_disable_literal_pool
  && GET_CODE (operands[1]) == CONST_INT
  && !satisfies_constraint_K (operands[1])"
  [(clobber (const_int 0))]
  thumb1_gen_const_int_rtl (operands[0], INTVAL (operands[1]));
;; 16-bit move.  The load alternative rewrites an SP-based index into
;; operand 0 first, because LDRH cannot take SP as a base register; the
;; MOVW alternative ('n') is only available with TARGET_HAVE_MOVT.
(define_insn "*thumb1_movhi_insn"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
	(match_operand:HI 1 "general_operand" "l,m,l,k*h,*r,I,n"))]
  && ( register_operand (operands[0], HImode)
      || register_operand (operands[1], HImode))"
  switch (which_alternative)
    case 0: return \"adds %0, %1, #0\";
    case 2: return \"strh %1, %0\";
    case 3: return \"mov %0, %1\";
    case 4: return \"mov %0, %1\";
    case 5: return \"movs %0, %1\";
    case 6: gcc_assert (TARGET_HAVE_MOVT);
	    return \"movw %0, %L1\";
    default: gcc_unreachable ();
  /* The stack pointer can end up being taken as an index register.
     Catch this case here and deal with it.  */
  if (GET_CODE (XEXP (operands[1], 0)) == PLUS
      && REG_P (XEXP (XEXP (operands[1], 0), 0))
      && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
  ops[0] = operands[0];
  ops[1] = XEXP (XEXP (operands[1], 0), 0);
  output_asm_insn (\"mov %0, %1\", ops);
  XEXP (XEXP (operands[1], 0), 0) = operands[0];
  return \"ldrh %0, %1\";
  [(set_attr "length" "2,4,2,2,2,2,4")
   (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm")
   (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])
;; HI store expander with a DI scratch (operand 2).  When the address is
;; already valid and the source is a low register it simply defers to
;; the normal movhi expander; other cases remain unhandled (see XXX).
(define_expand "thumb_movhi_clobber"
  [(set (match_operand:HI 0 "memory_operand")
	(match_operand:HI 1 "register_operand"))
   (clobber (match_operand:DI 2 "register_operand"))]
  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
      && REGNO (operands[1]) <= LAST_LO_REGNUM)
    emit_insn (gen_movhi (operands[0], operands[1]));
  /* XXX Fixme, need to handle other cases here as well.  */
;; 8-bit move; alternatives mirror the HI pattern (register copy, LDRB,
;; STRB, high-register moves, and an immediate MOVS).
(define_insn "*thumb1_movqi_insn"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
	(match_operand:QI 1 "general_operand" "l,m,l,k*h,*r,I"))]
  && ( register_operand (operands[0], QImode)
      || register_operand (operands[1], QImode))"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm")
   (set_attr "pool_range" "*,32,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Half-precision float move.  Loads distinguish constant-pool addresses
;; (LDR of a full word) from ordinary memory (LDRH).  The 'F' constant
;; alternative materialises the 16-bit image of the float with a
;; movs/lsls/adds sequence built from its target-format bit pattern.
(define_insn "*thumb1_movhf"
  [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,l,m,*r,*h")
	(match_operand:HF 1 "general_operand" "l, m,F,l,*h,*r"))]
  && ( s_register_operand (operands[0], HFmode)
      || s_register_operand (operands[1], HFmode))"
  switch (which_alternative)
    return \"movs\\t%0, %1\";
  gcc_assert (MEM_P (operands[1]));
  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == CONST
	  && GET_CODE (XEXP (addr, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
	  && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
    /* Constant pool entry.  */
    return \"ldr\\t%0, %1\";
  return \"ldrh\\t%0, %1\";
  bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
  ops[0] = operands[0];
  high = (bits >> 8) & 0xff;
  ops[1] = GEN_INT (high);
  ops[2] = GEN_INT (bits & 0xff);
  output_asm_insn (\"movs\\t%0, %1\;lsls\\t%0, #8\;adds\\t%0, %2\", ops);
  output_asm_insn (\"movs\\t%0, %2\", ops);
  case 3: return \"strh\\t%1, %0\";
  default: return \"mov\\t%0, %1\";
  [(set_attr "length" "2,2,6,2,2,2")
   (set_attr "type" "mov_reg,load_4,mov_reg,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,1018,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond")])
;;; ??? This should have alternatives for constants.
;;
;; Single-precision float move; handled as an opaque 32-bit transfer
;; (register copies, ldmia/stmia, literal loads, word load/store).
(define_insn "*thumb1_movsf_insn"
  [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
	(match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
  && ( register_operand (operands[0], SFmode)
      || register_operand (operands[1], SFmode))"
  [(set_attr "length" "2")
   (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,*,*,1018,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdi_insn pattern.
;;; ??? The 'F' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
;;
;; Double-precision float move; structured like *thumb1_movdi_insn.
;; Register-register copies pick the word order that avoids clobbering
;; an overlapping source half before it is read.
(define_insn "*thumb_movdf_insn"
  [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
	(match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
  && ( register_operand (operands[0], DFmode)
      || register_operand (operands[1], DFmode))"
  switch (which_alternative)
    if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
      return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
    return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
    return \"ldmia\\t%1, {%0, %H0}\";
    return \"stmia\\t%0, {%1, %H1}\";
    return thumb_load_double_from_address (operands);
    operands[2] = gen_rtx_MEM (SImode,
			       plus_constant (Pmode,
					      XEXP (operands[0], 0), 4));
    output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
    if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
      return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
    return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
  [(set_attr "length" "4,2,2,6,4,4")
   (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "pool_range" "*,*,*,1018,*,*")]
;; Thumb block-move insns

;; Copy three words and post-increment both pointers by 12; the actual
;; assembly is produced by thumb_output_move_mem_multiple (3 words),
;; using the three scratch low registers as the transfer set.
(define_insn "cpymem12b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
	(mem:SI (plus:SI (match_dup 3) (const_int 8))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 12)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 12)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))
   (clobber (match_scratch:SI 6 "=&l"))]
  "* return thumb_output_move_mem_multiple (3, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_12")]

;; Two-word variant of cpymem12b; pointers advance by 8 and only two
;; scratch registers are needed.
(define_insn "cpymem8b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 8)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 8)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))]
  "* return thumb_output_move_mem_multiple (2, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_8")]
1072 ;; A pattern to recognize a special situation and optimize for it.
1073 ;; On the thumb, zero-extension from memory is preferable to sign-extension
1074 ;; due to the available addressing modes. Hence, convert a signed comparison
1075 ;; with zero into an unsigned comparison with 127 if possible.
;; Implementation: zero-extend the QImode memory operand into a fresh SImode
;; pseudo, then branch on GE 0 -> LEU 127 (resp. LT 0 -> GTU 127) through
;; the normal cbranchsi4 expander.
1076 (define_expand "cbranchqi4"
1077 [(set (pc) (if_then_else
1078 (match_operator 0 "lt_ge_comparison_operator"
1079 [(match_operand:QI 1 "memory_operand")
1080 (match_operand:QI 2 "const0_operand")])
1081 (label_ref (match_operand 3 "" ""))
1086 xops[1] = gen_reg_rtx (SImode);
1087 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
1088 xops[2] = GEN_INT (127);
1089 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
1090 VOIDmode, xops[1], xops[2]);
1091 xops[3] = operands[3];
1092 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
1096 ;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile,
1097 ;; adapted from cbranchsi4_insn. Modifying cbranchsi4_insn instead leads to
1098 ;; code generation difference for ARMv6-M because the minimum length of the
1099 ;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's
1100 ;; handling of PC in the length condition.
;; When the branch target is within CB(N)Z range (length 2) a single cbz/cbnz
;; is emitted.  Otherwise fall back to cmp + conditional branch, reusing a
;; still-live flags result recorded in cfun->machine->thumb1_cc_* when the
;; compared operands match; the 6- and 8-byte forms branch around an
;; unconditional long/far jump.
1101 (define_insn "thumb1_cbz"
1102 [(set (pc) (if_then_else
1103 (match_operator 0 "equality_operator"
1104 [(match_operand:SI 1 "s_register_operand" "l")
1106 (label_ref (match_operand 2 "" ""))
1108 "TARGET_THUMB1 && TARGET_HAVE_CBZ"
1110 if (get_attr_length (insn) == 2)
1112 if (GET_CODE (operands[0]) == EQ)
1113 return "cbz\t%1, %l2";
1115 return "cbnz\t%1, %l2";
1119 rtx t = cfun->machine->thumb1_cc_insn;
1122 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
1123 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
1125 if (cfun->machine->thumb1_cc_mode == CC_NZmode)
1127 if (!nz_comparison_operator (operands[0], VOIDmode))
1130 else if (cfun->machine->thumb1_cc_mode != CCmode)
1135 output_asm_insn ("cmp\t%1, #0", operands);
1136 cfun->machine->thumb1_cc_insn = insn;
1137 cfun->machine->thumb1_cc_op0 = operands[1];
1138 cfun->machine->thumb1_cc_op1 = operands[2];
1139 cfun->machine->thumb1_cc_mode = CCmode;
1142 /* Ensure we emit the right type of condition code on the jump. */
1143 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1146 switch (get_attr_length (insn))
1148 case 4: return "b%d0\t%l2";
1149 case 6: return "b%D0\t.LCB%=\;b\t%l2\t%@long jump\n.LCB%=:";
1150 case 8: return "b%D0\t.LCB%=\;bl\t%l2\t%@far jump\n.LCB%=:";
1151 default: gcc_unreachable ();
1155 [(set (attr "far_jump")
1157 (eq_attr "length" "8")
1158 (const_string "yes")
1159 (const_string "no")))
1160 (set (attr "length")
1162 (and (ge (minus (match_dup 2) (pc)) (const_int 2))
1163 (le (minus (match_dup 2) (pc)) (const_int 128)))
1166 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1167 (le (minus (match_dup 2) (pc)) (const_int 256)))
1170 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1171 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1176 (eq_attr "length" "2")
1177 (const_string "branch")
1178 (const_string "multiple")))]
1181 ;; Changes to the constraints of this pattern must be propagated to those of
1182 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
1183 ;; strict as the constraints here and aim to be as permissive.
;; Compare-and-branch: emits "cmp %1, %2" unless the flags already hold the
;; result of comparing the same operands (tracked in
;; cfun->machine->thumb1_cc_*), then a conditional branch whose 4/6/8-byte
;; form is chosen by branch distance via the length attribute below.
1184 (define_insn "cbranchsi4_insn"
1185 [(set (pc) (if_then_else
1186 (match_operator 0 "arm_comparison_operator"
1187 [(match_operand:SI 1 "s_register_operand" "l,l*h")
1188 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
1189 (label_ref (match_operand 3 "" ""))
1193 rtx t = cfun->machine->thumb1_cc_insn;
1196 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
1197 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
1199 if (cfun->machine->thumb1_cc_mode == CC_NZmode)
1201 if (!nz_comparison_operator (operands[0], VOIDmode))
1204 else if (cfun->machine->thumb1_cc_mode != CCmode)
1209 output_asm_insn ("cmp\t%1, %2", operands);
1210 cfun->machine->thumb1_cc_insn = insn;
1211 cfun->machine->thumb1_cc_op0 = operands[1];
1212 cfun->machine->thumb1_cc_op1 = operands[2];
1213 cfun->machine->thumb1_cc_mode = CCmode;
1216 /* Ensure we emit the right type of condition code on the jump. */
1217 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1220 switch (get_attr_length (insn))
1222 case 4: return \"b%d0\\t%l3\";
1223 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1224 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1227 [(set (attr "far_jump")
1229 (eq_attr "length" "8")
1230 (const_string "yes")
1231 (const_string "no")))
1232 (set (attr "length")
1234 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1235 (le (minus (match_dup 3) (pc)) (const_int 256)))
1238 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1239 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1242 (set_attr "type" "multiple")]
1245 ;; An expander which makes use of the cbranchsi4_scratch insn, but can
1246 ;; be used safely after RA.
;; Unlike cbranchsi4_scratch itself, the scratch here (operand 0) is an
;; explicit, already-allocated register rather than a match_scratch, so the
;; expansion needs no register allocation.
1247 (define_expand "cbranchsi4_neg_late"
1249 (set (pc) (if_then_else
1250 (match_operator 4 "arm_comparison_operator"
1251 [(match_operand:SI 1 "s_register_operand")
1252 (match_operand:SI 2 "thumb1_cmpneg_operand")])
1253 (label_ref (match_operand 3 "" ""))
1255 (clobber (match_operand:SI 0 "s_register_operand"))
1260 ;; Changes to the constraints of this pattern must be propagated to those of
1261 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
1262 ;; strict as the constraints here and aim to be as permissive.
;; Compare against a negatable constant: sets the flags with
;; "adds scratch, %1, #-%2" (adding the negated immediate), then emits the
;; distance-dependent conditional branch.
1263 (define_insn "cbranchsi4_scratch"
1264 [(set (pc) (if_then_else
1265 (match_operator 4 "arm_comparison_operator"
1266 [(match_operand:SI 1 "s_register_operand" "l,0")
1267 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
1268 (label_ref (match_operand 3 "" ""))
1270 (clobber (match_scratch:SI 0 "=l,l"))]
1273 output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);
1275 switch (get_attr_length (insn))
1277 case 4: return \"b%d4\\t%l3\";
1278 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1279 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1282 [(set (attr "far_jump")
1284 (eq_attr "length" "8")
1285 (const_string "yes")
1286 (const_string "no")))
1287 (set (attr "length")
1289 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1290 (le (minus (match_dup 3) (pc)) (const_int 256)))
1293 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1294 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1297 (set_attr "type" "multiple")]
;; Branch on (in)equality of %1 against the negation of %2: CMN computes the
;; flags for %1 + %2 (i.e. %1 - (-%2)), followed by the usual
;; distance-dependent conditional branch.
1300 (define_insn "*negated_cbranchsi4"
1303 (match_operator 0 "equality_operator"
1304 [(match_operand:SI 1 "s_register_operand" "l")
1305 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
1306 (label_ref (match_operand 3 "" ""))
1310 output_asm_insn (\"cmn\\t%1, %2\", operands);
1311 switch (get_attr_length (insn))
1313 case 4: return \"b%d0\\t%l3\";
1314 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1315 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1318 [(set (attr "far_jump")
1320 (eq_attr "length" "8")
1321 (const_string "yes")
1322 (const_string "no")))
1323 (set (attr "length")
1325 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1326 (le (minus (match_dup 3) (pc)) (const_int 256)))
1329 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1330 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1333 (set_attr "type" "multiple")]
;; Branch on a single bit of %1: LSLS shifts the selected bit (position
;; INTVAL (operands[2])) up to bit 31 into scratch operand 4, setting the N
;; and Z flags, then the equality branch tests the result.
1336 (define_insn "*tbit_cbranch"
1339 (match_operator 0 "equality_operator"
1340 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1342 (match_operand:SI 2 "const_int_operand" "i"))
1344 (label_ref (match_operand 3 "" ""))
1346 (clobber (match_scratch:SI 4 "=l"))]
1351 op[0] = operands[4];
1352 op[1] = operands[1];
1353 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
1355 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1356 switch (get_attr_length (insn))
1358 case 4: return \"b%d0\\t%l3\";
1359 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1360 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1363 [(set (attr "far_jump")
1365 (eq_attr "length" "8")
1366 (const_string "yes")
1367 (const_string "no")))
1368 (set (attr "length")
1370 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1371 (le (minus (match_dup 3) (pc)) (const_int 256)))
1374 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1375 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1378 (set_attr "type" "multiple")]
;; Branch on the low INTVAL (operands[2]) bits of %1: LSLS shifts those bits
;; to the top of scratch operand 4 (shift count 32 - width), setting Z, and
;; the equality branch tests the result.
1381 (define_insn "*tlobits_cbranch"
1384 (match_operator 0 "equality_operator"
1385 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1386 (match_operand:SI 2 "const_int_operand" "i")
1389 (label_ref (match_operand 3 "" ""))
1391 (clobber (match_scratch:SI 4 "=l"))]
1396 op[0] = operands[4];
1397 op[1] = operands[1];
1398 op[2] = GEN_INT (32 - INTVAL (operands[2]));
1400 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1401 switch (get_attr_length (insn))
1403 case 4: return \"b%d0\\t%l3\";
1404 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1405 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1408 [(set (attr "far_jump")
1410 (eq_attr "length" "8")
1411 (const_string "yes")
1412 (const_string "no")))
1413 (set (attr "length")
1415 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1416 (le (minus (match_dup 3) (pc)) (const_int 256)))
1419 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1420 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1423 (set_attr "type" "multiple")]
;; Branch on (%0 AND %1) ==/!= 0: TST sets the flags without writing a
;; register, then the distance-dependent conditional branch follows.
1426 (define_insn "*tstsi3_cbranch"
1429 (match_operator 3 "equality_operator"
1430 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
1431 (match_operand:SI 1 "s_register_operand" "l"))
1433 (label_ref (match_operand 2 "" ""))
1438 output_asm_insn (\"tst\\t%0, %1\", operands);
1439 switch (get_attr_length (insn))
1441 case 4: return \"b%d3\\t%l2\";
1442 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
1443 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
1446 [(set (attr "far_jump")
1448 (eq_attr "length" "8")
1449 (const_string "yes")
1450 (const_string "no")))
1451 (set (attr "length")
1453 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1454 (le (minus (match_dup 2) (pc)) (const_int 256)))
1457 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1458 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1461 (set_attr "type" "multiple")]
;; Decrement-and-branch: stores %2 - 1 into operand 0 and branches on the
;; comparison of the original %2 with 1 (SUBS sets the flags).  Alternatives:
;; 0 = low-reg target (subs directly); 1 = hi-reg target (subs into the
;; scratch, then mov, which does not disturb the flags); 2/3 = memory target
;; (subs into the scratch, then str).  The branch condition is rebuilt in
;; cond[] with NE/EQ mapped onto a compare against 1.
1464 (define_insn "*cbranchne_decr1"
1466 (if_then_else (match_operator 3 "equality_operator"
1467 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
1469 (label_ref (match_operand 4 "" ""))
1471 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
1472 (plus:SI (match_dup 2) (const_int -1)))
1473 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
1478 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1480 VOIDmode, operands[2], const1_rtx);
1481 cond[1] = operands[4];
1483 if (which_alternative == 0)
1484 output_asm_insn (\"subs\\t%0, %2, #1\", operands);
1485 else if (which_alternative == 1)
1487 /* We must provide an alternative for a hi reg because reload
1488 cannot handle output reloads on a jump instruction, but we
1489 can't subtract into that. Fortunately a mov from lo to hi
1490 does not clobber the condition codes. */
1491 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1492 output_asm_insn (\"mov\\t%0, %1\", operands);
1496 /* Similarly, but the target is memory. */
1497 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1498 output_asm_insn (\"str\\t%1, %0\", operands);
1501 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
1504 output_asm_insn (\"b%d0\\t%l1\", cond);
1507 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1508 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
1510 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1511 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1515 [(set (attr "far_jump")
1517 (ior (and (eq (symbol_ref ("which_alternative"))
1519 (eq_attr "length" "8"))
1520 (eq_attr "length" "10"))
1521 (const_string "yes")
1522 (const_string "no")))
1523 (set_attr_alternative "length"
1527 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1528 (le (minus (match_dup 4) (pc)) (const_int 256)))
1531 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1532 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1537 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1538 (le (minus (match_dup 4) (pc)) (const_int 256)))
1541 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1542 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1547 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1548 (le (minus (match_dup 4) (pc)) (const_int 256)))
1551 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1552 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1557 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1558 (le (minus (match_dup 4) (pc)) (const_int 256)))
1561 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1562 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1565 (set_attr "type" "multiple")]
;; Add-and-branch: computes %2 + %3 into operand 0 (ADDS, or SUBS of the
;; negated constant when %3 is negative) and branches on the comparison,
;; which is restricted to EQ/NE/GE/LT so the N and Z flags suffice.
;; Alternatives 0-1 add directly into a low register; 2-3 add into the
;; scratch then MOV to the (possibly hi) target; 4-5 add into the scratch
;; then STR to a memory target.
1568 (define_insn "*addsi3_cbranch"
1571 (match_operator 4 "arm_comparison_operator"
1573 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
1574 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
1576 (label_ref (match_operand 5 "" ""))
1579 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
1580 (plus:SI (match_dup 2) (match_dup 3)))
1581 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
1583 && (GET_CODE (operands[4]) == EQ
1584 || GET_CODE (operands[4]) == NE
1585 || GET_CODE (operands[4]) == GE
1586 || GET_CODE (operands[4]) == LT)"
1591 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
1592 cond[1] = operands[2];
1593 cond[2] = operands[3];
1595 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
1596 output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
1598 output_asm_insn (\"adds\\t%0, %1, %2\", cond);
1600 if (which_alternative >= 2
1601 && which_alternative < 4)
1602 output_asm_insn (\"mov\\t%0, %1\", operands);
1603 else if (which_alternative >= 4)
1604 output_asm_insn (\"str\\t%1, %0\", operands);
1606 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
1609 return \"b%d4\\t%l5\";
1611 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
1613 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
1617 [(set (attr "far_jump")
1619 (ior (and (lt (symbol_ref ("which_alternative"))
1621 (eq_attr "length" "8"))
1622 (eq_attr "length" "10"))
1623 (const_string "yes")
1624 (const_string "no")))
1625 (set (attr "length")
1627 (lt (symbol_ref ("which_alternative"))
1630 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
1631 (le (minus (match_dup 5) (pc)) (const_int 256)))
1634 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
1635 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1639 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
1640 (le (minus (match_dup 5) (pc)) (const_int 256)))
1643 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
1644 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1647 (set_attr "type" "multiple")]
;; As *addsi3_cbranch but the sum is discarded: compare %1 + %2 against zero
;; without keeping the result.  Alternative 0 uses CMP against the negated
;; constant, alternative 1 uses CMN, alternatives 2-3 perform ADDS/SUBS into
;; a scratch.  Restricted to EQ/NE/GE/LT as only N and Z are meaningful.
1650 (define_insn "*addsi3_cbranch_scratch"
1653 (match_operator 3 "arm_comparison_operator"
1655 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
1656 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
1658 (label_ref (match_operand 4 "" ""))
1660 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
1662 && (GET_CODE (operands[3]) == EQ
1663 || GET_CODE (operands[3]) == NE
1664 || GET_CODE (operands[3]) == GE
1665 || GET_CODE (operands[3]) == LT)"
1668 switch (which_alternative)
1671 output_asm_insn (\"cmp\t%1, #%n2\", operands);
1674 output_asm_insn (\"cmn\t%1, %2\", operands);
1677 if (INTVAL (operands[2]) < 0)
1678 output_asm_insn (\"subs\t%0, %1, %2\", operands);
1680 output_asm_insn (\"adds\t%0, %1, %2\", operands);
1683 if (INTVAL (operands[2]) < 0)
1684 output_asm_insn (\"subs\t%0, %0, %2\", operands);
1686 output_asm_insn (\"adds\t%0, %0, %2\", operands);
1690 switch (get_attr_length (insn))
1693 return \"b%d3\\t%l4\";
1695 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
1697 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1701 [(set (attr "far_jump")
1703 (eq_attr "length" "8")
1704 (const_string "yes")
1705 (const_string "no")))
1706 (set (attr "length")
1708 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1709 (le (minus (match_dup 4) (pc)) (const_int 256)))
1712 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1713 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1716 (set_attr "type" "multiple")]
;; DImode compare against zero: ORRS the two word halves (%Q0, %R0) into a
;; scratch, which sets Z exactly when the whole 64-bit value is zero.
1719 (define_insn "*thumb_cmpdi_zero"
1720 [(set (reg:CC_Z CC_REGNUM)
1721 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
1723 (clobber (match_scratch:SI 1 "=l"))]
1725 "orrs\\t%1, %Q0, %R0"
1726 [(set_attr "conds" "set")
1727 (set_attr "length" "2")
1728 (set_attr "type" "logics_reg")]
;; Expand %0 = (%1 == 0) as an SImode 0/1 value; allocates a fresh pseudo
;; for the clobbered scratch so *cstoresi_eq0_thumb1_insn can match.
1731 (define_expand "cstoresi_eq0_thumb1"
1733 [(set (match_operand:SI 0 "s_register_operand")
1734 (eq:SI (match_operand:SI 1 "s_register_operand")
1736 (clobber (match_dup:SI 2))])]
1738 "operands[2] = gen_reg_rtx (SImode);"
;; Expand %0 = (%1 != 0) as an SImode 0/1 value; allocates a fresh pseudo
;; for the clobbered scratch so *cstoresi_ne0_thumb1_insn can match.
1741 (define_expand "cstoresi_ne0_thumb1"
1743 [(set (match_operand:SI 0 "s_register_operand")
1744 (ne:SI (match_operand:SI 1 "s_register_operand")
1746 (clobber (match_dup:SI 2))])]
1748 "operands[2] = gen_reg_rtx (SImode);"
;; %0 = (%1 == 0) via the RSBS/ADCS carry trick: RSBS computes 0 - %1 and
;; leaves carry set only when %1 == 0; ADCS then folds that carry into the
;; 0/1 result.  Second alternative routes through the scratch when %0 and
;; %1 are tied.
1751 (define_insn "*cstoresi_eq0_thumb1_insn"
1752 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
1753 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
1755 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
1758 rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
1759 rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
1760 [(set_attr "length" "4")
1761 (set_attr "type" "multiple")]
;; %0 = (%1 != 0) via the SUBS/SBCS borrow trick: SUBS %2 = %1 - 1 borrows
;; exactly when %1 == 0, and SBCS %0 = %1 - %2 - borrow yields 0 or 1.
1764 (define_insn "*cstoresi_ne0_thumb1_insn"
1765 [(set (match_operand:SI 0 "s_register_operand" "=l")
1766 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
1768 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
1770 "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
1771 [(set_attr "length" "4")]
1774 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; %0 = -(%1 <u %2): CMP sets the borrow, and "sbcs %0, %0, %0" broadcasts
;; it into all bits, giving -1 when %1 <u %2 and 0 otherwise.
1775 (define_insn "cstoresi_nltu_thumb1"
1776 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1777 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1778 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
1780 "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
1781 [(set_attr "length" "4")
1782 (set_attr "type" "multiple")]
;; %0 = (%1 <u %2) as a 0/1 value.  Split into cstoresi_nltu_thumb1
;; computing -(ltu) in a fresh pseudo, followed by a negation of that
;; pseudo into %0.
1785 (define_insn_and_split "cstoresi_ltu_thumb1"
1786 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1787 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1788 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
1793 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
1794 (set (match_dup 0) (neg:SI (match_dup 3)))]
1795 "operands[3] = gen_reg_rtx (SImode);"
1796 [(set_attr "length" "4")
1797 (set_attr "type" "multiple")]
1800 ;; Used as part of the expansion of thumb les sequence.
;; %0 = %1 + %2 + (%3 >=u %4): CMP %3, %4 leaves carry set exactly when
;; %3 >=u %4, and ADCS folds that carry into the sum.
1801 (define_insn "thumb1_addsi3_addgeu"
1802 [(set (match_operand:SI 0 "s_register_operand" "=l")
1803 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
1804 (match_operand:SI 2 "s_register_operand" "l"))
1805 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
1806 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
1808 "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
1809 [(set_attr "length" "4")
1810 (set_attr "type" "multiple")]
;; Unconditional jump: a 2-byte B when the target is within range
;; (per the length attribute below), otherwise a BL used as a far jump.
1814 (define_insn "*thumb_jump"
1816 (label_ref (match_operand 0 "" "")))]
1819 if (get_attr_length (insn) == 2)
1821 return \"bl\\t%l0\\t%@ far jump\";
1823 [(set (attr "far_jump")
1825 (eq_attr "length" "4")
1826 (const_string "yes")
1827 (const_string "no")))
1828 (set (attr "length")
1830 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
1831 (le (minus (match_dup 0) (pc)) (const_int 2048)))
1834 (set_attr "type" "branch")]
;; Indirect (non-sibling) call through a register for armv5t+ Thumb-1;
;; 2 bytes, clobbers LR.  (The output template line is not visible in this
;; excerpt -- presumably BLX; confirm against the full source.)
1837 (define_insn "*call_reg_thumb1_v5"
1838 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1839 (match_operand 1 "" ""))
1840 (use (match_operand 2 "" ""))
1841 (clobber (reg:SI LR_REGNUM))]
1842 "TARGET_THUMB1 && arm_arch5t && !SIBLING_CALL_P (insn)"
1844 [(set_attr "length" "2")
1845 (set_attr "type" "call")]
;; CMSE non-secure call: the target address is pinned in r4 and the call
;; goes through the __gnu_cmse_nonsecure_call library veneer; clobbers LR.
1848 (define_insn "*nonsecure_call_reg_thumb1_v5"
1849 [(call (unspec:SI [(mem:SI (reg:SI R4_REGNUM))]
1850 UNSPEC_NONSECURE_MEM)
1851 (match_operand 0 "" ""))
1852 (use (match_operand 1 "" ""))
1853 (clobber (reg:SI LR_REGNUM))]
1854 "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)"
1855 "bl\\t__gnu_cmse_nonsecure_call"
1856 [(set_attr "length" "4")
1857 (set_attr "type" "call")]
;; Indirect call without BLX (pre-armv5t): normally goes via a
;; call-via-register thunk (thumb_call_via_reg); with caller interworking
;; enabled it instead calls an __interwork_*_call_via_<reg> helper, the
;; variant depending on whether a frame pointer is in use.
1860 (define_insn "*call_reg_thumb1"
1861 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1862 (match_operand 1 "" ""))
1863 (use (match_operand 2 "" ""))
1864 (clobber (reg:SI LR_REGNUM))]
1865 "TARGET_THUMB1 && !arm_arch5t && !SIBLING_CALL_P (insn)"
1868 if (!TARGET_CALLER_INTERWORKING)
1869 return thumb_call_via_reg (operands[0]);
1870 else if (operands[1] == const0_rtx)
1871 return \"bl\\t%__interwork_call_via_%0\";
1872 else if (frame_pointer_needed)
1873 return \"bl\\t%__interwork_r7_call_via_%0\";
1875 return \"bl\\t%__interwork_r11_call_via_%0\";
1877 [(set_attr "type" "call")]
;; Value-returning variant of *call_reg_thumb1_v5 (armv5t+); operand 0
;; receives the result.  (Output template line elided from this excerpt.)
1880 (define_insn "*call_value_reg_thumb1_v5"
1881 [(set (match_operand 0 "" "")
1882 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1883 (match_operand 2 "" "")))
1884 (use (match_operand 3 "" ""))
1885 (clobber (reg:SI LR_REGNUM))]
1886 "TARGET_THUMB1 && arm_arch5t"
1888 [(set_attr "length" "2")
1889 (set_attr "type" "call")]
;; Value-returning CMSE non-secure call: target in r4, routed through the
;; __gnu_cmse_nonsecure_call veneer; operand 0 receives the result.
1892 (define_insn "*nonsecure_call_value_reg_thumb1_v5"
1893 [(set (match_operand 0 "" "")
1895 [(mem:SI (reg:SI R4_REGNUM))]
1896 UNSPEC_NONSECURE_MEM)
1897 (match_operand 1 "" "")))
1898 (use (match_operand 2 "" ""))
1899 (clobber (reg:SI LR_REGNUM))]
1900 "TARGET_THUMB1 && use_cmse"
1901 "bl\\t__gnu_cmse_nonsecure_call"
1902 [(set_attr "length" "4")
1903 (set_attr "type" "call")]
;; Value-returning variant of *call_reg_thumb1 (pre-armv5t, no BLX):
;; thunk via thumb_call_via_reg, or the __interwork_* helpers when caller
;; interworking is enabled.
1906 (define_insn "*call_value_reg_thumb1"
1907 [(set (match_operand 0 "" "")
1908 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1909 (match_operand 2 "" "")))
1910 (use (match_operand 3 "" ""))
1911 (clobber (reg:SI LR_REGNUM))]
1912 "TARGET_THUMB1 && !arm_arch5t"
1915 if (!TARGET_CALLER_INTERWORKING)
1916 return thumb_call_via_reg (operands[1]);
1917 else if (operands[2] == const0_rtx)
1918 return \"bl\\t%__interwork_call_via_%1\";
1919 else if (frame_pointer_needed)
1920 return \"bl\\t%__interwork_r7_call_via_%1\";
1922 return \"bl\\t%__interwork_r11_call_via_%1\";
1924 [(set_attr "type" "call")]
;; Direct call to a SYMBOL_REF that is not subject to -mlong-calls
;; treatment.  (Output template line elided from this excerpt.)
1927 (define_insn "*call_insn"
1928 [(call (mem:SI (match_operand:SI 0 "" ""))
1929 (match_operand:SI 1 "" ""))
1930 (use (match_operand 2 "" ""))
1931 (clobber (reg:SI LR_REGNUM))]
1933 && GET_CODE (operands[0]) == SYMBOL_REF
1934 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
1936 [(set_attr "length" "4")
1937 (set_attr "type" "call")]
;; Value-returning direct call to a non-long-call SYMBOL_REF; operand 0
;; receives the result.  (Output template line elided from this excerpt.)
1940 (define_insn "*call_value_insn"
1941 [(set (match_operand 0 "" "")
1942 (call (mem:SI (match_operand 1 "" ""))
1943 (match_operand 2 "" "")))
1944 (use (match_operand 3 "" ""))
1945 (clobber (reg:SI LR_REGNUM))]
1947 && GET_CODE (operands[1]) == SYMBOL_REF
1948 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
1950 [(set_attr "length" "4")
1951 (set_attr "type" "call")]
;; PIC casesi expansion: branch to the default label when the index
;; (operand 0) is unsigned-greater than the bound (operand 1), copy the
;; index into r0, then emit the thumb1_casesi_dispatch jump.
1954 (define_expand "thumb1_casesi_internal_pic"
1955 [(match_operand:SI 0 "s_register_operand")
1956 (match_operand:SI 1 "thumb1_cmp_operand")
1957 (match_operand 2 "" "")
1958 (match_operand 3 "" "")]
1962 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
1963 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
1965 reg0 = gen_rtx_REG (SImode, 0);
1966 emit_move_insn (reg0, operands[0]);
1967 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; Dispatch half of the casesi sequence: jumps through the table whose
;; index was placed in r0 by thumb1_casesi_internal_pic.  Assembly comes
;; from thumb1_output_casesi; clobbers IP and LR.
1972 (define_insn "thumb1_casesi_dispatch"
1973 [(parallel [(set (pc) (unspec [(reg:SI 0)
1974 (label_ref (match_operand 0 "" ""))
1975 ;; (label_ref (match_operand 1 "" ""))
1977 UNSPEC_THUMB1_CASESI))
1978 (clobber (reg:SI IP_REGNUM))
1979 (clobber (reg:SI LR_REGNUM))])]
1981 "* return thumb1_output_casesi(operands);"
1982 [(set_attr "length" "4")
1983 (set_attr "type" "multiple")]
1986 ;; NB Never uses BX.
;; Indirect jump through a register, 2 bytes; conds marked "clob".
;; (Output template line elided from this excerpt.)
1987 (define_insn "*thumb1_indirect_jump"
1989 (match_operand:SI 0 "register_operand" "l*r"))]
1992 [(set_attr "conds" "clob")
1993 (set_attr "length" "2")
1994 (set_attr "type" "branch")]
;; ARM/Thumb interworking prologue stub; the 8-byte sequence is produced
;; by thumb1_output_interwork.
1998 (define_insn "prologue_thumb1_interwork"
1999 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
2001 "* return thumb1_output_interwork ();"
2002 [(set_attr "length" "8")
2003 (set_attr "type" "multiple")]
;; Function epilogue; assembly text comes from thumb1_unexpanded_epilogue.
;; The length attribute below is a worst-case bound, larger for CMSE entry
;; functions (see the comment inside the pattern).
2006 (define_insn "*epilogue_insns"
2007 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
2010 return thumb1_unexpanded_epilogue ();
2012 ; Length is absolute worst case, when using CMSE and if this is an entry
2013 ; function an extra 4 (MSR) bytes will be added.
2014 [(set (attr "length")
2016 (match_test "IS_CMSE_ENTRY (arm_current_func_type ())")
2019 (set_attr "type" "block")
2020 ;; We don't clobber the conditions, but the potential length of this
2021 ;; operation is sufficient to make conditionalizing the sequence
2022 ;; unlikely to be profitable.
2023 (set_attr "conds" "clob")]
2026 ;; Miscellaneous Thumb patterns
;; Tablejump expander: the visible path materialises the table label's
;; address in a register and adds it to the index before jumping, as needed
;; for position-independent dispatch tables.
2027 (define_expand "tablejump"
2028 [(parallel [(set (pc) (match_operand:SI 0 "register_operand"))
2029 (use (label_ref (match_operand 1 "" "")))])]
2034 /* Hopefully, CSE will eliminate this copy. */
2035 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
2036 rtx reg2 = gen_reg_rtx (SImode);
2038 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
;; Read the program counter into a low register, 2 bytes, no condition-code
;; effect.  (Output template line elided from this excerpt.)
2044 (define_insn "*thumb1_movpc_insn"
2045 [(set (match_operand:SI 0 "s_register_operand" "=l")
2046 (reg:SI PC_REGNUM))]
2049 [(set_attr "length" "2")
2050 (set_attr "conds" "nocond")
2051 (set_attr "type" "mov_reg")]
2054 ;; NB never uses BX.
;; Jump through the computed dispatch-table address in operand 0, with the
;; table label kept live via the USE.  (Output template line elided from
;; this excerpt.)
2055 (define_insn "*thumb1_tablejump"
2056 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
2057 (use (label_ref (match_operand 1 "" "")))]
2060 [(set_attr "length" "2")
2061 (set_attr "type" "branch")]
;; EH return: split after reload into code (thumb_set_return_address) that
;; stores the handler address (operand 0) into the function's
;; return-address slot, using operand 1 as a low-register scratch.
2064 (define_insn_and_split "thumb_eh_return"
2065 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
2067 (clobber (match_scratch:SI 1 "=&l"))]
2070 "&& reload_completed"
2074 thumb_set_return_address (operands[0], operands[1]);
2077 [(set_attr "type" "mov_reg")]
2080 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
2081 ;; canary value does not live beyond the end of this sequence.
2082 (define_insn "thumb1_stack_protect_test_insn"
2083 [(set (match_operand:SI 0 "register_operand" "=&l")
2084 (unspec:SI [(match_operand:SI 1 "memory_operand" "m")
2085 (mem:SI (match_operand:SI 2 "register_operand" "+l"))]
2087 (clobber (match_dup 2))]
2089 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;movs\t%2, #0"
2090 [(set_attr "length" "10")
2091 (set_attr "conds" "clob")
2092 (set_attr "type" "multiple")]