;; ----------------------------------------------------------------------
;; TEST INSTRUCTIONS
;; ----------------------------------------------------------------------

;; (define_insn_and_split "*tst_extzv_1_n"
;;   [(set (cc0)
;;        (compare (zero_extract:SI (match_operand:QI 0 "general_operand_src" "r,rU,mn>")
;;                                  (const_int 1)
;;                                  (match_operand 1 "const_int_operand" "n,n,n"))
;;                 (const_int 0)))
;;    (clobber (match_scratch:QI 2 "=X,X,&r"))]
;;   "!CONSTANT_P (operands[0])"
;;   "@
;;    btst\t%Z1,%Y0
;;    btst\t%Z1,%Y0
;;    #"
;;   "&& reload_completed
;;    && !satisfies_constraint_U (operands[0])"
;;   [(set (match_dup 2)
;;        (match_dup 0))
;;    (parallel [(set (cc0) (compare (zero_extract:SI (match_dup 2)
;;                                                    (const_int 1)
;;                                                    (match_dup 1))
;;                                   (const_int 0)))
;;               (clobber (scratch:QI))])]
;;   ""
;;   [(set_attr "length" "2,8,10")])

(define_insn "*tst_extzv_1_n"
  [(set (reg:CCZ CC_REG)
        (eq (zero_extract:HSI (match_operand:HSI 0 "register_operand" "r")
                              (const_int 1)
                              (match_operand 1 "const_int_operand" "n"))
            (const_int 0)))]
  "INTVAL (operands[1]) < 16"
  "btst %Z1,%Y0"
  [(set_attr "length" "2")])

(define_insn "*tst<mode>"
  [(set (reg:CCZN CC_REG)
        (compare:CCZN (match_operand:QHSI 0 "register_operand" "r")
                      (const_int 0)))]
  "reload_completed"
  {
    if (<MODE>mode == QImode)
      return "mov.b %X0,%X0";
    else if (<MODE>mode == HImode)
      return "mov.w %T0,%T0";
    else if (<MODE>mode == SImode)
      return "mov.l %S0,%S0";
    gcc_unreachable ();
  }
  [(set_attr "length" "2")])

(define_insn "*tsthi_upper"
  [(set (reg:CCZN CC_REG)
        (compare (and:HI (match_operand:HI 0 "register_operand" "r")
                         (const_int -256))
                 (const_int 0)))]
  "reload_completed"
  "mov.b %t0,%t0"
  [(set_attr "length" "2")])

(define_insn "*tsthi_upper_z"
  [(set (reg:CCZ CC_REG)
        (compare (and:HI (match_operand:HI 0 "register_operand" "r")
                         (const_int -256))
                 (const_int 0)))]
  "reload_completed"
  "mov.b %t0,%t0"
  [(set_attr "length" "2")])

(define_insn "*tstsi_upper"
  [(set (reg:CCZN CC_REG)
        (compare (and:SI (match_operand:SI 0 "register_operand" "r")
                         (const_int -65536))
                 (const_int 0)))]
  "reload_completed"
  "mov.w %e0,%e0"
  [(set_attr "length" "2")])

(define_insn "*cmp<mode>_c"
  [(set (reg:CCC CC_REG)
        (ltu (match_operand:QHSI 0 "h8300_dst_operand" "rQ")
             (match_operand:QHSI 1 "h8300_src_operand" "rQi")))]
  "reload_completed"
  {
    if (<MODE>mode == QImode)
      return "cmp.b %X1,%X0";
    else if (<MODE>mode == HImode)
      return "cmp.w %T1,%T0";
    else if (<MODE>mode == SImode)
      return "cmp.l %S1,%S0";
    gcc_unreachable ();
  }
  [(set_attr "length_table" "add")])

(define_insn "*cmpqi_z"
  [(set (reg:CCZ CC_REG)
        (eq (match_operand:QI 0 "h8300_dst_operand" "rQ")
            (match_operand:QI 1 "h8300_src_operand" "rQi")))]
  "reload_completed"
  { return "cmp.b %X1,%X0"; }
  [(set_attr "length_table" "add")])

(define_insn "*cmphi_z"
  [(set (reg:CCZ CC_REG)
        (eq (match_operand:HI 0 "h8300_dst_operand" "rQ")
            (match_operand:HI 1 "h8300_src_operand" "rQi")))]
  "reload_completed"
  { return "cmp.w %T1,%T0"; }
  [(set_attr "length_table" "add")])

(define_insn "*cmpsi_z"
  [(set (reg:CCZ CC_REG)
        (eq (match_operand:SI 0 "h8300_dst_operand" "rQ")
            (match_operand:SI 1 "h8300_src_operand" "rQi")))]
  "reload_completed"
  { return "cmp.l %S1,%S0"; }
  [(set_attr "length_table" "add")])

(define_insn "*cmpqi"
  [(set (reg:CC CC_REG)
        (compare (match_operand:QI 0 "h8300_dst_operand" "rQ")
                 (match_operand:QI 1 "h8300_src_operand" "rQi")))]
  "reload_completed"
  "cmp.b %X1,%X0"
  [(set_attr "length_table" "add")])

(define_insn "*cmphi"
  [(set (reg:CC CC_REG)
        (compare (match_operand:HI 0 "h8300_dst_operand" "rU,rQ")
                 (match_operand:HI 1 "h8300_src_operand" "P3>X,rQi")))]
  "reload_completed"
  {
    switch (which_alternative)
      {
      case 0:
        if (!TARGET_H8300SX)
          return "cmp.w %T1,%T0";
        else
          return "cmp.w %T1:3,%T0";
      case 1:
        return "cmp.w %T1,%T0";
      default:
        gcc_unreachable ();
      }
  }
  [(set_attr "length_table" "short_immediate,add")])

(define_insn "*cmpsi"
  [(set (reg:CC CC_REG)
        (compare (match_operand:SI 0 "h8300_dst_operand" "r,rQ")
                 (match_operand:SI 1 "h8300_src_operand" "P3>X,rQi")))]
  "reload_completed"
  {
    switch (which_alternative)
      {
      case 0:
        if (!TARGET_H8300SX)
          return "cmp.l %S1,%S0";
        else
          return "cmp.l %S1:3,%S0";
      case 1:
        return "cmp.l %S1,%S0";
      default:
        gcc_unreachable ();
      }
  }
  [(set_attr "length" "2,*")
   (set_attr "length_table" "*,add")])

;; Convert a memory comparison to a move if there is a scratch register.
;; This is preferred over the next as we can proactively avoid the
;; need for a separate test instruction.
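;;
;; As a rough illustration (register and symbol names are arbitrary), a
;; byte-sized test of a memory location with a scratch register free can
;; come out as a single
;;   mov.b @sym,r2l
;; since the move itself sets Z and N, instead of needing a separate test
;; instruction.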
(define_peephole2
  [(match_scratch:QHSI 1 "r")
   (set (reg:CCZN CC_REG)
        (compare (match_operand:QHSI 0 "memory_operand" "")
                 (const_int 0)))]
  "!mode_dependent_address_p (XEXP (operands[0], 0), MEM_ADDR_SPACE (operands[0]))"
  [(parallel [(set (reg:CCZN CC_REG) (compare:CCZN (match_dup 0) (const_int 0)))
              (set (match_dup 1) (match_dup 0))])])

;; Similarly, but used when the memory reference is an autoinc address,
;; where the side effect must only be performed once.
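;;
;; For instance (register names are arbitrary), with a post-increment
;; source the load has to be emitted exactly once:
;;   mov.b @er5+,r2l
;;   cmp.b #0,r2l
;; rather than duplicating the auto-incremented memory reference inside
;; the comparison.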
(define_peephole2
  [(match_scratch:QHSI 1 "r")
   (set (reg:CC CC_REG)
        (compare (match_operand:QHSI 0 "memory_operand" "")
                 (const_int 0)))]
  "mode_dependent_address_p (XEXP (operands[0], 0), MEM_ADDR_SPACE (operands[0]))"
  [(parallel [(set (match_dup 1) (match_dup 0)) (clobber (reg:CC CC_REG))])
   (set (reg:CC CC_REG) (compare:CC (match_dup 1) (const_int 0)))])

;; The compare-elimination pass does not handle memory references.  So this
;; little peephole helps fill the gap and avoid code quality regressions.
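;;
;; For example (names are illustrative), a load followed by a register
;; test of the copy, i.e.
;;   mov.b @sym,r2l
;;   mov.b r2l,r2l
;; collapses to just the load, since the load already provides Z and N.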
(define_peephole2
  [(parallel [(set (match_operand:QHSI 0 "register_operand" "")
                   (match_operand:QHSI 1 "simple_memory_operand" ""))
              (clobber (reg:CC CC_REG))])
   (set (reg:CCZN CC_REG)
        (compare:CCZN (match_dup 0) (const_int 0)))]
  ""
  [(parallel [(set (reg:CCZN CC_REG) (compare:CCZN (match_dup 1) (const_int 0)))
              (set (match_dup 0) (match_dup 1))])])

;; This exists solely to convince ifcvt to try some store-flag sequences.
;;
;; Essentially we don't want to expose a general store-flag capability.
;; The only generally useful/profitable case is when we want to test the
;; C bit.  In that case we can use addx, subx, bst, or bist to get the bit
;; into a GPR.
;;
;; Others could be handled with stc, shifts and masking, but it likely isn't
;; worth the effort.
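;;
;; As a sketch of the intended use, a function like
;;   unsigned char f (unsigned int a, unsigned int b) { return a < b; }
;; can then be if-converted into a compare followed by one of the C-bit
;; store sequences below instead of a compare, branch and constant load.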
(define_expand "cstore<mode>4"
  [(use (match_operator 1 "eqne_operator"
         [(match_operand:QHSI 2 "h8300_dst_operand" "")
          (match_operand:QHSI 3 "h8300_src_operand" "")]))
   (clobber (match_operand:QHSI 0 "register_operand"))]

;; Storing the C bit is pretty simple since there are many ways to
;; introduce it into a GPR.  addx, subx and a variety of bit manipulation
;; instructions can be used.

(define_insn "*store_c_<mode>"
  [(set (match_operand:QHSI 0 "register_operand" "=r")
        (eqne:QHSI (reg:CCC CC_REG) (const_int 0)))]
  "reload_completed"
  {
    if (<CODE> == NE)
      {
        if (<MODE>mode == QImode)
          return "xor.b\t%X0,%X0\;bst\t#0,%X0";
        else if (<MODE>mode == HImode)
          return "xor.w\t%T0,%T0\;bst\t#0,%s0";
        else if (<MODE>mode == SImode)
          return "xor.l\t%S0,%S0\;bst\t#0,%w0";
        gcc_unreachable ();
      }
    else if (<CODE> == EQ)
      {
        if (<MODE>mode == QImode)
          return "xor.b\t%X0,%X0\;bist\t#0,%X0";
        else if (<MODE>mode == HImode)
          return "xor.w\t%T0,%T0\;bist\t#0,%s0";
        else if (<MODE>mode == SImode)
          return "xor.l\t%S0,%S0\;bist\t#0,%w0";
        gcc_unreachable ();
      }
    gcc_unreachable ();
  }
  [(set (attr "length") (symbol_ref "<MODE>mode == SImode ? 6 : 4"))])

;; Similarly, but with a negated result
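;; With the C bit set from an unsigned comparison, "subx %X0,%X0" computes
;; dst - dst - C, i.e. 0 or -1, so the negated flag value falls out of a
;; single instruction; wider modes only need the sign extensions below.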
(define_insn "*store_neg_c_<mode>"
  [(set (match_operand:QHSI 0 "register_operand" "=r")
        (neg:QHSI (ne:QHSI (reg:CCC CC_REG) (const_int 0))))]
  "reload_completed"
  {
    if (<MODE>mode == QImode)
      return "subx\t%X0,%X0";
    else if (<MODE>mode == HImode)
      return "subx\t%X0,%X0\;exts.w\t%T0";
    else if (<MODE>mode == SImode)
      return "subx\t%X0,%X0\;exts.w\t%T0\;exts.l\t%S0";
    gcc_unreachable ();
  }
  [(set (attr "length")
        (symbol_ref "(<MODE>mode == SImode ? 6 : <MODE>mode == HImode ? 4 : 2)"))])

;; Using b[i]st we can store the C bit into any of the low 16 bits of
;; a destination.  We can also rotate it up into the high bit of a 32 bit
;; destination.
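;; For example (bit positions are only illustrative), after clearing the
;; destination, "bst #5,%X0" deposits the C bit in bit 5, while for bit 31
;; "rotxr.l %S0" rotates the carry straight into the most significant bit
;; of the cleared 32-bit register.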
(define_insn "*store_shifted_c<mode>"
  [(set (match_operand:QHSI 0 "register_operand" "=r")
        (ashift:QHSI (eqne:QHSI (reg:CCC CC_REG) (const_int 0))
                     (match_operand 1 "immediate_operand" "n")))]
  "(reload_completed
    && (INTVAL (operands[1]) == 31 || INTVAL (operands[1]) <= 15))"
  {
    if (<CODE> == NE)
      {
        if (<MODE>mode == QImode)
          return "xor.b\t%X0,%X0\;bst\t%1,%X0";
        else if (<MODE>mode == HImode && INTVAL (operands[1]) < 8)
          return "xor.w\t%T0,%T0\;bst\t%1,%X0";
        else if (<MODE>mode == HImode)
          {
            operands[1] = GEN_INT (INTVAL (operands[1]) - 8);
            output_asm_insn ("xor.w\t%T0,%T0\;bst\t%1,%t0", operands);
            return "";
          }
        else if (<MODE>mode == SImode && INTVAL (operands[1]) == 31)
          return "xor.l\t%S0,%S0\;rotxr.l\t%S0";
        else if (<MODE>mode == SImode && INTVAL (operands[1]) < 8)
          return "xor.l\t%S0,%S0\;bst\t%1,%X0";
        else if (<MODE>mode == SImode)
          {
            operands[1] = GEN_INT (INTVAL (operands[1]) - 8);
            output_asm_insn ("xor.l\t%S0,%S0\;bst\t%1,%t0", operands);
            return "";
          }
        gcc_unreachable ();
      }
    else if (<CODE> == EQ)
      {
        if (<MODE>mode == QImode)
          return "xor.b\t%X0,%X0\;bist\t%1,%X0";
        else if (<MODE>mode == HImode && INTVAL (operands[1]) < 8)
          return "xor.w\t%T0,%T0\;bist\t%1,%X0";
        else if (<MODE>mode == HImode)
          {
            operands[1] = GEN_INT (INTVAL (operands[1]) - 8);
            output_asm_insn ("xor.w\t%T0,%T0\;bist\t%1,%t0", operands);
            return "";
          }
        else if (<MODE>mode == SImode && INTVAL (operands[1]) == 31)
          return "xor.l\t%S0,%S0\;bixor\t#0,%X0\;rotxr.l\t%S0";
        else if (<MODE>mode == SImode && INTVAL (operands[1]) < 8)
          return "xor.l\t%S0,%S0\;bist\t%1,%X0";
        else if (<MODE>mode == SImode)
          {
            operands[1] = GEN_INT (INTVAL (operands[1]) - 8);
            output_asm_insn ("xor.l\t%S0,%S0\;bist\t%1,%t0", operands);
            return "";
          }
        gcc_unreachable ();
      }
    gcc_unreachable ();
  }
  [(set (attr "length")
        (symbol_ref "(<MODE>mode == QImode ? 4
                      : <MODE>mode == HImode ? 4
                      : INTVAL (operands[1]) == 31 ? 8 : 6)"))])

;; Recognize this scc and generate code we can match
(define_insn_and_split "*store_c"
  [(set (match_operand:QHSI 0 "register_operand" "=r")
        (geultu:QHSI (match_operand:QHSI2 1 "register_operand" "r")
                     (match_operand:QHSI2 2 "register_operand" "r")))]
  ""
  "#"
  "&& reload_completed"
  [(set (reg:CCC CC_REG)
        (ltu:CCC (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (<geultu_to_c>:QHSI (reg:CCC CC_REG) (const_int 0)))])

;; We can fold in negation of the result and generate better code than
;; what the generic bits would do when testing for C == 1
(define_insn_and_split "*store_neg_c"
  [(set (match_operand:QHSI 0 "register_operand" "=r")
        (neg:QHSI
          (ltu:QHSI (match_operand:QHSI2 1 "register_operand" "r")
                    (match_operand:QHSI2 2 "register_operand" "r"))))]
  ""
  "#"
  "&& reload_completed"
  [(set (reg:CCC CC_REG)
        (ltu:CCC (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (neg:QHSI (ne:QHSI (reg:CCC CC_REG) (const_int 0))))])

;; We can use rotates and bst/bist to put the C bit into various places
;; in the destination.
(define_insn_and_split "*store_shifted_c"
  [(set (match_operand:QHSI 0 "register_operand" "=r")
        (ashift:QHSI (geultu:QHSI (match_operand:QHSI2 1 "register_operand" "r")
                                  (match_operand:QHSI2 2 "register_operand" "r"))
                     (match_operand 3 "immediate_operand" "n")))]
  "INTVAL (operands[3]) == 31 || INTVAL (operands[3]) <= 15"
  "#"
  "&& reload_completed"
  [(set (reg:CCC CC_REG) (ltu:CCC (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (ashift:QHSI (<geultu_to_c>:QHSI (reg:CCC CC_REG) (const_int 0))
                     (match_dup 3)))])

;; Storing Z into a QImode destination is fairly easy on the H8/S and
;; newer as the stc; shift; mask is just 3 insns/6 bytes.  On the H8/300H
;; it is 4 insns/8 bytes which is a speed improvement, but a size
;; regression relative to the branchy sequence.
;;
;; Storing inverted Z in QImode is not profitable on the H8/300H, but
;; is a speed improvement on the H8S.
(define_insn_and_split "*store_z_qi"
  [(set (match_operand:QI 0 "register_operand" "=r")
        (eq:QI (match_operand:HI 1 "register_operand" "r")
               (match_operand:HI 2 "register_operand" "r")))]
  "TARGET_H8300S || !optimize_size"
  "#"
  "&& reload_completed"
  [(set (reg:CCZ CC_REG)
        (eq:CCZ (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (ne:QI (reg:CCZ CC_REG) (const_int 0)))])

(define_insn_and_split "*store_z_i_qi"
  [(set (match_operand:QI 0 "register_operand" "=r")
        (ne:QI (match_operand:HI 1 "register_operand" "r")
               (match_operand:HI 2 "register_operand" "r")))]
  "TARGET_H8300S || !optimize_size"
  "#"
  "&& reload_completed"
  [(set (reg:CCZ CC_REG)
        (eq:CCZ (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (eq:QI (reg:CCZ CC_REG) (const_int 0)))])

(define_insn "*store_z_qi"
  [(set (match_operand:QI 0 "register_operand" "=r")
        (ne:QI (reg:CCZ CC_REG) (const_int 0)))]
  "(TARGET_H8300S || !optimize_size) && reload_completed"
  {
    if (TARGET_H8300S)
      return "stc\tccr,%X0\;shar\t#2,%X0\;and\t#0x1,%X0";
    else
      return "stc\tccr,%X0\;shar\t%X0\;shar\t%X0\;and\t#0x1,%X0";
  }
  [(set (attr "length") (symbol_ref "TARGET_H8300S ? 6 : 8"))])

(define_insn "*store_z_i_qi"
  [(set (match_operand:QI 0 "register_operand" "=r")
        (eq:QI (reg:CCZ CC_REG) (const_int 0)))]
  "(TARGET_H8300S || !optimize_size) && reload_completed"
  "stc\tccr,%X0\;bld\t#2,%X0\;xor.w\t%T0,%T0\;bist\t#0,%X0"
  [(set_attr "length" "8")])

;; Storing Z or an inverted Z into a HImode destination is
;; profitable on the H8/S and older variants, but not on the
;; H8/SX where the branchy sequence can use the two-byte
;; mov-immediate that is specific to the H8/SX.
(define_insn_and_split "*store_z_hi"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (eqne:HSI (match_operand:HSI2 1 "register_operand" "r")
                  (match_operand:HSI2 2 "register_operand" "r")))]
  "!TARGET_H8300SX"
  "#"
  "&& reload_completed"
  [(set (reg:CCZ CC_REG)
        (eq:CCZ (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (<eqne_invert>:HSI (reg:CCZ CC_REG) (const_int 0)))])

;; Similar, but putting the result into the sign bit
(define_insn_and_split "*store_z_hi_sb"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (ashift:HSI (eqne:HSI (match_operand:HSI2 1 "register_operand" "r")
                              (match_operand:HSI2 2 "register_operand" "r"))
                    (match_operand 3 "immediate_operand" "n")))]
  "INTVAL (operands[3]) == (<MODE>mode == HImode ? 15 : 31)"
  "#"
  "&& reload_completed"
  [(set (reg:CCZ CC_REG)
        (eq:CCZ (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (ashift:HSI (<eqne_invert>:HSI (reg:CCZ CC_REG) (const_int 0))
                    (match_dup 3)))])

;; Similar, but negating the result
(define_insn_and_split "*store_z_hi_neg"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (neg:HSI (eqne:HSI (match_operand:HSI2 1 "register_operand" "r")
                           (match_operand:HSI2 2 "register_operand" "r"))))]
  "!TARGET_H8300SX"
  "#"
  "&& reload_completed"
  [(set (reg:CCZ CC_REG)
        (eq:CCZ (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (neg:HSI (<eqne_invert>:HSI (reg:CCZ CC_REG) (const_int 0))))])

(define_insn_and_split "*store_z_hi_and"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (and:HSI (eqne:HSI (match_operand:HSI2 1 "register_operand" "r")
                           (match_operand:HSI2 2 "register_operand" "r"))
                 (match_operand:HSI 3 "register_operand" "r")))]
  "!TARGET_H8300SX"
  "#"
  "&& reload_completed"
  [(set (reg:CCZ CC_REG)
        (eq:CCZ (match_dup 1) (match_dup 2)))
   (set (match_dup 0)
        (and:HSI (<eqne_invert>:HSI (reg:CCZ CC_REG) (const_int 0))
                 (match_dup 3)))])

(define_insn "*store_z_<mode>"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (eqne:HSI (reg:CCZ CC_REG) (const_int 0)))]
  "reload_completed"
  {
    if (<MODE>mode == HImode)
      {
        if (<CODE> == NE)
          {
            if (TARGET_H8300S)
              return "stc\tccr,%X0\;shlr.b\t#2,%X0\;and.w\t#1,%T0";
            return "stc\tccr,%X0\;bld\t#2,%X0\;xor.w\t%T0,%T0\;bst\t#0,%X0";
          }
        else
          return "stc\tccr,%X0\;bld\t#2,%X0\;xor.w\t%T0,%T0\;bist\t#0,%X0";
      }
    else if (<MODE>mode == SImode)
      {
        if (<CODE> == NE)
          {
            if (TARGET_H8300S)
              return "stc\tccr,%X0\;shlr.b\t#2,%X0\;and.l\t#1,%S0";
            return "stc\tccr,%X0\;bld\t#2,%X0\;xor.l\t%S0,%S0\;bst\t#0,%X0";
          }
        else
          return "stc\tccr,%X0\;bld\t#2,%X0\;xor.l\t%S0,%S0\;bist\t#0,%X0";
      }
    gcc_unreachable ();
  }
  ;; XXX SImode is 2 bytes longer
  [(set_attr "length" "8")])

(define_insn "*store_z_<mode>_sb"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (ashift:HSI (eqne:HSI (reg:CCZ CC_REG) (const_int 0))
                    (match_operand 1 "immediate_operand" "n")))]
  "reload_completed && INTVAL (operands[1]) == (<MODE>mode == HImode ? 15 : 31)"
  {
    if (<MODE>mode == HImode)
      {
        if (<CODE> == NE)
          return "stc\tccr,%X0\;bld\t#2,%X0\;xor.w\t%T0,%T0\;bst\t#7,%t0";
        else
          return "stc\tccr,%X0\;bld\t#2,%X0\;xor.w\t%T0,%T0\;bist\t#7,%t0";
      }
    else if (<MODE>mode == SImode)
      {
        if (<CODE> == NE)
          return "stc\tccr,%X0\;bld\t#2,%X0\;xor.l\t%S0,%S0\;rotxr.l\t%S0";
        else
          return "stc\tccr,%X0\;bild\t#2,%X0\;xor.l\t%S0,%S0\;rotxr.l\t%S0";
      }
    gcc_unreachable ();
  }
  ;; XXX SImode is larger
  [(set_attr "length" "8")])

(define_insn "*store_z_<mode>_neg"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (neg:HSI (eqne:HSI (reg:CCZ CC_REG) (const_int 0))))]
  "reload_completed"
  {
    if (<MODE>mode == HImode)
      {
        if (<CODE> == NE)
          return "stc\tccr,%X0\;bld\t#2,%X0\;subx.b\t%X0,%X0\;exts.w\t%T0";
        else
          return "stc\tccr,%X0\;bild\t#2,%X0\;subx.b\t%X0,%X0\;exts.w\t%T0";
      }
    else if (<MODE>mode == SImode)
      {
        if (<CODE> == NE)
          return "stc\tccr,%X0\;bld\t#2,%X0\;subx.b\t%X0,%X0\;exts.w\t%T0\;exts.l\t%S0";
        else
          return "stc\tccr,%X0\;bild\t#2,%X0\;subx.b\t%X0,%X0\;exts.w\t%T0\;exts.l\t%S0";
      }
    gcc_unreachable ();
  }
  ;; XXX SImode is an instruction longer
  [(set_attr "length" "8")])

(define_insn "*store_z_<mode>_and"
  [(set (match_operand:HSI 0 "register_operand" "=r")
        (and:HSI (eqne:HSI (reg:CCZ CC_REG) (const_int 0))
                 (match_operand:HSI 1 "register_operand" "r")))]
  "reload_completed"
  {
    if (<MODE>mode == HImode)
      {
        if (<CODE> == NE)
          return "bld\t#0,%X1\;stc\tccr,%X0\;band\t#2,%X0\;xor.w\t%T0,%T0\;bst\t#0,%X0";
        else
          return "bild\t#0,%X1\;stc\tccr,%X0\;band\t#2,%X0\;xor.w\t%T0,%T0\;bist\t#0,%X0";
      }
    else if (<MODE>mode == SImode)
      {
        if (<CODE> == NE)
          return "bld\t#0,%X1\;stc\tccr,%X0\;band\t#2,%X0\;xor.l\t%S0,%S0\;bst\t#0,%X0";
        else
          return "bild\t#0,%X1\;stc\tccr,%X0\;band\t#2,%X0\;xor.l\t%S0,%S0\;bist\t#0,%X0";
      }
    gcc_unreachable ();
  }
  ;; XXX SImode is an instruction longer
  [(set_attr "length" "8")])

;; We can test the upper byte of a HImode register and the upper word
;; of a SImode register
(define_insn_and_split "*store_z"
  [(set (match_operand:HI 0 "register_operand" "=r")
        (eqne:HI (and:HI (match_operand:HI 1 "register_operand" "r")
                         (const_int -256))
                 (const_int 0)))]
  ""
  "#"
  "&& reload_completed"
  [(set (reg:CCZ CC_REG)
        (compare (and:HI (match_dup 1) (const_int -256))
                 (const_int 0)))
   (set (match_dup 0)
        (<eqne_invert>:HI (reg:CCZ CC_REG) (const_int 0)))])