1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
3 ; RUN: | FileCheck -check-prefixes=RV32,RV32I %s
4 ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
5 ; RUN: | FileCheck -check-prefixes=RV64I %s
6 ; RUN: llc -mtriple=riscv64 -mcpu=sifive-u74 -verify-machineinstrs < %s \
7 ; RUN: | FileCheck -check-prefix=SFB64 %s
8 ; RUN: llc -mtriple=riscv64 -mattr=+xventanacondops -verify-machineinstrs < %s \
9 ; RUN: | FileCheck -check-prefixes=VTCONDOPS64 %s
10 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zicond -verify-machineinstrs < %s \
11 ; RUN: | FileCheck -check-prefixes=RV32,ZICOND,ZICOND32 %s
12 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zicond -verify-machineinstrs < %s \
13 ; RUN: | FileCheck -check-prefixes=ZICOND,ZICOND64 %s
15 ; InstCombine canonicalizes (c ? x | y : x) to (x | (c ? y : 0)), and similarly for
16 ; other binary operations, using their identity value as the constant.
18 ; We can reverse this for and/or/xor, allowing us to pull the binop into
19 ; the basic block we create when we expand select.
21 define signext i32 @and_select_all_ones_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
22 ; RV32I-LABEL: and_select_all_ones_i32:
24 ; RV32I-NEXT: addi a0, a0, -1
25 ; RV32I-NEXT: or a0, a0, a1
26 ; RV32I-NEXT: and a0, a0, a2
29 ; RV64I-LABEL: and_select_all_ones_i32:
31 ; RV64I-NEXT: addi a0, a0, -1
32 ; RV64I-NEXT: or a0, a0, a1
33 ; RV64I-NEXT: and a0, a0, a2
36 ; SFB64-LABEL: and_select_all_ones_i32:
38 ; SFB64-NEXT: beqz a0, .LBB0_2
39 ; SFB64-NEXT: # %bb.1:
40 ; SFB64-NEXT: and a2, a2, a1
41 ; SFB64-NEXT: .LBB0_2:
42 ; SFB64-NEXT: mv a0, a2
45 ; VTCONDOPS64-LABEL: and_select_all_ones_i32:
46 ; VTCONDOPS64: # %bb.0:
47 ; VTCONDOPS64-NEXT: vt.maskcn a0, a2, a0
48 ; VTCONDOPS64-NEXT: and a1, a2, a1
49 ; VTCONDOPS64-NEXT: or a0, a1, a0
50 ; VTCONDOPS64-NEXT: ret
52 ; ZICOND-LABEL: and_select_all_ones_i32:
54 ; ZICOND-NEXT: czero.nez a0, a2, a0
55 ; ZICOND-NEXT: and a1, a2, a1
56 ; ZICOND-NEXT: or a0, a1, a0
58 %a = select i1 %c, i32 %x, i32 -1
63 define signext i32 @and_select_all_ones_i32_cmp(i32 signext %x, i32 signext %y, i32 signext %z) {
64 ; RV32I-LABEL: and_select_all_ones_i32_cmp:
66 ; RV32I-NEXT: addi a2, a2, -4
67 ; RV32I-NEXT: seqz a2, a2
68 ; RV32I-NEXT: addi a2, a2, -1
69 ; RV32I-NEXT: or a0, a2, a0
70 ; RV32I-NEXT: and a0, a0, a1
73 ; RV64I-LABEL: and_select_all_ones_i32_cmp:
75 ; RV64I-NEXT: addi a2, a2, -4
76 ; RV64I-NEXT: seqz a2, a2
77 ; RV64I-NEXT: addi a2, a2, -1
78 ; RV64I-NEXT: or a0, a2, a0
79 ; RV64I-NEXT: and a0, a0, a1
82 ; SFB64-LABEL: and_select_all_ones_i32_cmp:
84 ; SFB64-NEXT: li a3, 4
85 ; SFB64-NEXT: bne a2, a3, .LBB1_2
86 ; SFB64-NEXT: # %bb.1:
87 ; SFB64-NEXT: and a1, a1, a0
88 ; SFB64-NEXT: .LBB1_2:
89 ; SFB64-NEXT: mv a0, a1
92 ; VTCONDOPS64-LABEL: and_select_all_ones_i32_cmp:
93 ; VTCONDOPS64: # %bb.0:
94 ; VTCONDOPS64-NEXT: addi a2, a2, -4
95 ; VTCONDOPS64-NEXT: and a0, a1, a0
96 ; VTCONDOPS64-NEXT: vt.maskc a1, a1, a2
97 ; VTCONDOPS64-NEXT: or a0, a0, a1
98 ; VTCONDOPS64-NEXT: ret
100 ; ZICOND-LABEL: and_select_all_ones_i32_cmp:
102 ; ZICOND-NEXT: addi a2, a2, -4
103 ; ZICOND-NEXT: and a0, a1, a0
104 ; ZICOND-NEXT: czero.eqz a1, a1, a2
105 ; ZICOND-NEXT: or a0, a0, a1
107 %c = icmp eq i32 %z, 4
108 %a = select i1 %c, i32 %x, i32 -1
113 define signext i32 @and_select_all_ones_i32_cmp2(i32 signext %x, i32 signext %y, i32 signext %z) {
114 ; RV32I-LABEL: and_select_all_ones_i32_cmp2:
116 ; RV32I-NEXT: slti a2, a2, 4
117 ; RV32I-NEXT: addi a2, a2, -1
118 ; RV32I-NEXT: or a0, a2, a0
119 ; RV32I-NEXT: and a0, a0, a1
122 ; RV64I-LABEL: and_select_all_ones_i32_cmp2:
124 ; RV64I-NEXT: slti a2, a2, 4
125 ; RV64I-NEXT: addi a2, a2, -1
126 ; RV64I-NEXT: or a0, a2, a0
127 ; RV64I-NEXT: and a0, a0, a1
130 ; SFB64-LABEL: and_select_all_ones_i32_cmp2:
132 ; SFB64-NEXT: li a3, 4
133 ; SFB64-NEXT: bge a2, a3, .LBB2_2
134 ; SFB64-NEXT: # %bb.1:
135 ; SFB64-NEXT: and a1, a1, a0
136 ; SFB64-NEXT: .LBB2_2:
137 ; SFB64-NEXT: mv a0, a1
140 ; VTCONDOPS64-LABEL: and_select_all_ones_i32_cmp2:
141 ; VTCONDOPS64: # %bb.0:
142 ; VTCONDOPS64-NEXT: slti a2, a2, 4
143 ; VTCONDOPS64-NEXT: and a0, a1, a0
144 ; VTCONDOPS64-NEXT: vt.maskcn a1, a1, a2
145 ; VTCONDOPS64-NEXT: or a0, a0, a1
146 ; VTCONDOPS64-NEXT: ret
148 ; ZICOND-LABEL: and_select_all_ones_i32_cmp2:
150 ; ZICOND-NEXT: slti a2, a2, 4
151 ; ZICOND-NEXT: and a0, a1, a0
152 ; ZICOND-NEXT: czero.nez a1, a1, a2
153 ; ZICOND-NEXT: or a0, a0, a1
155 %c = icmp slt i32 %z, 4
156 %a = select i1 %c, i32 %x, i32 -1
161 define i64 @and_select_all_ones_i64(i1 zeroext %c, i64 %x, i64 %y) {
162 ; RV32-LABEL: and_select_all_ones_i64:
164 ; RV32-NEXT: neg a0, a0
165 ; RV32-NEXT: or a2, a0, a2
166 ; RV32-NEXT: or a0, a0, a1
167 ; RV32-NEXT: and a0, a3, a0
168 ; RV32-NEXT: and a1, a4, a2
171 ; RV64I-LABEL: and_select_all_ones_i64:
173 ; RV64I-NEXT: neg a0, a0
174 ; RV64I-NEXT: or a0, a0, a1
175 ; RV64I-NEXT: and a0, a2, a0
178 ; SFB64-LABEL: and_select_all_ones_i64:
180 ; SFB64-NEXT: bnez a0, .LBB3_2
181 ; SFB64-NEXT: # %bb.1:
182 ; SFB64-NEXT: and a2, a2, a1
183 ; SFB64-NEXT: .LBB3_2:
184 ; SFB64-NEXT: mv a0, a2
187 ; VTCONDOPS64-LABEL: and_select_all_ones_i64:
188 ; VTCONDOPS64: # %bb.0:
189 ; VTCONDOPS64-NEXT: vt.maskc a0, a2, a0
190 ; VTCONDOPS64-NEXT: and a1, a2, a1
191 ; VTCONDOPS64-NEXT: or a0, a1, a0
192 ; VTCONDOPS64-NEXT: ret
194 ; ZICOND64-LABEL: and_select_all_ones_i64:
196 ; ZICOND64-NEXT: czero.eqz a0, a2, a0
197 ; ZICOND64-NEXT: and a1, a2, a1
198 ; ZICOND64-NEXT: or a0, a1, a0
200 %a = select i1 %c, i64 -1, i64 %x
205 define i64 @and_select_all_ones_i64_cmp(i64 %x, i64 %y, i64 %z) {
206 ; RV32-LABEL: and_select_all_ones_i64_cmp:
208 ; RV32-NEXT: xori a4, a4, 4
209 ; RV32-NEXT: or a4, a4, a5
210 ; RV32-NEXT: seqz a4, a4
211 ; RV32-NEXT: addi a4, a4, -1
212 ; RV32-NEXT: or a1, a4, a1
213 ; RV32-NEXT: or a0, a4, a0
214 ; RV32-NEXT: and a0, a0, a2
215 ; RV32-NEXT: and a1, a1, a3
218 ; RV64I-LABEL: and_select_all_ones_i64_cmp:
220 ; RV64I-NEXT: addi a2, a2, -4
221 ; RV64I-NEXT: seqz a2, a2
222 ; RV64I-NEXT: addi a2, a2, -1
223 ; RV64I-NEXT: or a0, a2, a0
224 ; RV64I-NEXT: and a0, a0, a1
227 ; SFB64-LABEL: and_select_all_ones_i64_cmp:
229 ; SFB64-NEXT: li a3, 4
230 ; SFB64-NEXT: bne a2, a3, .LBB4_2
231 ; SFB64-NEXT: # %bb.1:
232 ; SFB64-NEXT: and a1, a1, a0
233 ; SFB64-NEXT: .LBB4_2:
234 ; SFB64-NEXT: mv a0, a1
237 ; VTCONDOPS64-LABEL: and_select_all_ones_i64_cmp:
238 ; VTCONDOPS64: # %bb.0:
239 ; VTCONDOPS64-NEXT: addi a2, a2, -4
240 ; VTCONDOPS64-NEXT: and a0, a1, a0
241 ; VTCONDOPS64-NEXT: vt.maskc a1, a1, a2
242 ; VTCONDOPS64-NEXT: or a0, a0, a1
243 ; VTCONDOPS64-NEXT: ret
245 ; ZICOND64-LABEL: and_select_all_ones_i64_cmp:
247 ; ZICOND64-NEXT: addi a2, a2, -4
248 ; ZICOND64-NEXT: and a0, a1, a0
249 ; ZICOND64-NEXT: czero.eqz a1, a1, a2
250 ; ZICOND64-NEXT: or a0, a0, a1
252 %c = icmp eq i64 %z, 4
253 %a = select i1 %c, i64 %x, i64 -1
258 define i64 @and_select_all_ones_i64_cmp2(i64 %x, i64 %y, i64 %z) {
259 ; RV32I-LABEL: and_select_all_ones_i64_cmp2:
261 ; RV32I-NEXT: beqz a5, .LBB5_2
262 ; RV32I-NEXT: # %bb.1:
263 ; RV32I-NEXT: slti a4, a5, 0
264 ; RV32I-NEXT: j .LBB5_3
265 ; RV32I-NEXT: .LBB5_2:
266 ; RV32I-NEXT: sltiu a4, a4, 4
267 ; RV32I-NEXT: .LBB5_3:
268 ; RV32I-NEXT: addi a4, a4, -1
269 ; RV32I-NEXT: or a1, a4, a1
270 ; RV32I-NEXT: or a0, a4, a0
271 ; RV32I-NEXT: and a0, a0, a2
272 ; RV32I-NEXT: and a1, a1, a3
275 ; RV64I-LABEL: and_select_all_ones_i64_cmp2:
277 ; RV64I-NEXT: slti a2, a2, 4
278 ; RV64I-NEXT: addi a2, a2, -1
279 ; RV64I-NEXT: or a0, a2, a0
280 ; RV64I-NEXT: and a0, a0, a1
283 ; SFB64-LABEL: and_select_all_ones_i64_cmp2:
285 ; SFB64-NEXT: li a3, 4
286 ; SFB64-NEXT: bge a2, a3, .LBB5_2
287 ; SFB64-NEXT: # %bb.1:
288 ; SFB64-NEXT: and a1, a1, a0
289 ; SFB64-NEXT: .LBB5_2:
290 ; SFB64-NEXT: mv a0, a1
293 ; VTCONDOPS64-LABEL: and_select_all_ones_i64_cmp2:
294 ; VTCONDOPS64: # %bb.0:
295 ; VTCONDOPS64-NEXT: slti a2, a2, 4
296 ; VTCONDOPS64-NEXT: and a0, a1, a0
297 ; VTCONDOPS64-NEXT: vt.maskcn a1, a1, a2
298 ; VTCONDOPS64-NEXT: or a0, a0, a1
299 ; VTCONDOPS64-NEXT: ret
301 ; ZICOND32-LABEL: and_select_all_ones_i64_cmp2:
303 ; ZICOND32-NEXT: slti a6, a5, 0
304 ; ZICOND32-NEXT: czero.eqz a6, a6, a5
305 ; ZICOND32-NEXT: sltiu a4, a4, 4
306 ; ZICOND32-NEXT: czero.nez a4, a4, a5
307 ; ZICOND32-NEXT: or a4, a4, a6
308 ; ZICOND32-NEXT: addi a4, a4, -1
309 ; ZICOND32-NEXT: or a1, a4, a1
310 ; ZICOND32-NEXT: or a0, a4, a0
311 ; ZICOND32-NEXT: and a0, a0, a2
312 ; ZICOND32-NEXT: and a1, a1, a3
315 ; ZICOND64-LABEL: and_select_all_ones_i64_cmp2:
317 ; ZICOND64-NEXT: slti a2, a2, 4
318 ; ZICOND64-NEXT: and a0, a1, a0
319 ; ZICOND64-NEXT: czero.nez a1, a1, a2
320 ; ZICOND64-NEXT: or a0, a0, a1
322 %c = icmp slt i64 %z, 4
323 %a = select i1 %c, i64 %x, i64 -1
328 define signext i32 @or_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
329 ; RV32I-LABEL: or_select_all_zeros_i32:
331 ; RV32I-NEXT: neg a0, a0
332 ; RV32I-NEXT: and a0, a0, a1
333 ; RV32I-NEXT: or a0, a2, a0
336 ; RV64I-LABEL: or_select_all_zeros_i32:
338 ; RV64I-NEXT: neg a0, a0
339 ; RV64I-NEXT: and a0, a0, a1
340 ; RV64I-NEXT: or a0, a2, a0
343 ; SFB64-LABEL: or_select_all_zeros_i32:
345 ; SFB64-NEXT: beqz a0, .LBB6_2
346 ; SFB64-NEXT: # %bb.1:
347 ; SFB64-NEXT: or a2, a2, a1
348 ; SFB64-NEXT: .LBB6_2:
349 ; SFB64-NEXT: mv a0, a2
352 ; VTCONDOPS64-LABEL: or_select_all_zeros_i32:
353 ; VTCONDOPS64: # %bb.0:
354 ; VTCONDOPS64-NEXT: vt.maskc a0, a1, a0
355 ; VTCONDOPS64-NEXT: or a0, a2, a0
356 ; VTCONDOPS64-NEXT: ret
358 ; ZICOND-LABEL: or_select_all_zeros_i32:
360 ; ZICOND-NEXT: czero.eqz a0, a1, a0
361 ; ZICOND-NEXT: or a0, a2, a0
363 %a = select i1 %c, i32 %x, i32 0
368 define i64 @or_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
369 ; RV32I-LABEL: or_select_all_zeros_i64:
371 ; RV32I-NEXT: addi a0, a0, -1
372 ; RV32I-NEXT: and a2, a0, a2
373 ; RV32I-NEXT: and a0, a0, a1
374 ; RV32I-NEXT: or a0, a0, a3
375 ; RV32I-NEXT: or a1, a2, a4
378 ; RV64I-LABEL: or_select_all_zeros_i64:
380 ; RV64I-NEXT: addi a0, a0, -1
381 ; RV64I-NEXT: and a0, a0, a1
382 ; RV64I-NEXT: or a0, a0, a2
385 ; SFB64-LABEL: or_select_all_zeros_i64:
387 ; SFB64-NEXT: bnez a0, .LBB7_2
388 ; SFB64-NEXT: # %bb.1:
389 ; SFB64-NEXT: or a2, a2, a1
390 ; SFB64-NEXT: .LBB7_2:
391 ; SFB64-NEXT: mv a0, a2
394 ; VTCONDOPS64-LABEL: or_select_all_zeros_i64:
395 ; VTCONDOPS64: # %bb.0:
396 ; VTCONDOPS64-NEXT: vt.maskcn a0, a1, a0
397 ; VTCONDOPS64-NEXT: or a0, a0, a2
398 ; VTCONDOPS64-NEXT: ret
400 ; ZICOND32-LABEL: or_select_all_zeros_i64:
402 ; ZICOND32-NEXT: czero.nez a2, a2, a0
403 ; ZICOND32-NEXT: czero.nez a0, a1, a0
404 ; ZICOND32-NEXT: or a0, a0, a3
405 ; ZICOND32-NEXT: or a1, a2, a4
408 ; ZICOND64-LABEL: or_select_all_zeros_i64:
410 ; ZICOND64-NEXT: czero.nez a0, a1, a0
411 ; ZICOND64-NEXT: or a0, a0, a2
413 %a = select i1 %c, i64 0, i64 %x
418 define signext i32 @xor_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
419 ; RV32I-LABEL: xor_select_all_zeros_i32:
421 ; RV32I-NEXT: addi a0, a0, -1
422 ; RV32I-NEXT: and a0, a0, a1
423 ; RV32I-NEXT: xor a0, a2, a0
426 ; RV64I-LABEL: xor_select_all_zeros_i32:
428 ; RV64I-NEXT: addi a0, a0, -1
429 ; RV64I-NEXT: and a0, a0, a1
430 ; RV64I-NEXT: xor a0, a2, a0
433 ; SFB64-LABEL: xor_select_all_zeros_i32:
435 ; SFB64-NEXT: bnez a0, .LBB8_2
436 ; SFB64-NEXT: # %bb.1:
437 ; SFB64-NEXT: xor a2, a2, a1
438 ; SFB64-NEXT: .LBB8_2:
439 ; SFB64-NEXT: mv a0, a2
442 ; VTCONDOPS64-LABEL: xor_select_all_zeros_i32:
443 ; VTCONDOPS64: # %bb.0:
444 ; VTCONDOPS64-NEXT: vt.maskcn a0, a1, a0
445 ; VTCONDOPS64-NEXT: xor a0, a2, a0
446 ; VTCONDOPS64-NEXT: ret
448 ; ZICOND-LABEL: xor_select_all_zeros_i32:
450 ; ZICOND-NEXT: czero.nez a0, a1, a0
451 ; ZICOND-NEXT: xor a0, a2, a0
453 %a = select i1 %c, i32 0, i32 %x
458 define i64 @xor_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
459 ; RV32I-LABEL: xor_select_all_zeros_i64:
461 ; RV32I-NEXT: neg a0, a0
462 ; RV32I-NEXT: and a2, a0, a2
463 ; RV32I-NEXT: and a0, a0, a1
464 ; RV32I-NEXT: xor a0, a0, a3
465 ; RV32I-NEXT: xor a1, a2, a4
468 ; RV64I-LABEL: xor_select_all_zeros_i64:
470 ; RV64I-NEXT: neg a0, a0
471 ; RV64I-NEXT: and a0, a0, a1
472 ; RV64I-NEXT: xor a0, a0, a2
475 ; SFB64-LABEL: xor_select_all_zeros_i64:
477 ; SFB64-NEXT: beqz a0, .LBB9_2
478 ; SFB64-NEXT: # %bb.1:
479 ; SFB64-NEXT: xor a2, a2, a1
480 ; SFB64-NEXT: .LBB9_2:
481 ; SFB64-NEXT: mv a0, a2
484 ; VTCONDOPS64-LABEL: xor_select_all_zeros_i64:
485 ; VTCONDOPS64: # %bb.0:
486 ; VTCONDOPS64-NEXT: vt.maskc a0, a1, a0
487 ; VTCONDOPS64-NEXT: xor a0, a0, a2
488 ; VTCONDOPS64-NEXT: ret
490 ; ZICOND32-LABEL: xor_select_all_zeros_i64:
492 ; ZICOND32-NEXT: czero.eqz a2, a2, a0
493 ; ZICOND32-NEXT: czero.eqz a0, a1, a0
494 ; ZICOND32-NEXT: xor a0, a0, a3
495 ; ZICOND32-NEXT: xor a1, a2, a4
498 ; ZICOND64-LABEL: xor_select_all_zeros_i64:
500 ; ZICOND64-NEXT: czero.eqz a0, a1, a0
501 ; ZICOND64-NEXT: xor a0, a0, a2
503 %a = select i1 %c, i64 %x, i64 0
508 define signext i32 @add_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
509 ; RV32I-LABEL: add_select_all_zeros_i32:
511 ; RV32I-NEXT: addi a0, a0, -1
512 ; RV32I-NEXT: and a0, a0, a1
513 ; RV32I-NEXT: add a0, a2, a0
516 ; RV64I-LABEL: add_select_all_zeros_i32:
518 ; RV64I-NEXT: addi a0, a0, -1
519 ; RV64I-NEXT: and a0, a0, a1
520 ; RV64I-NEXT: addw a0, a2, a0
523 ; SFB64-LABEL: add_select_all_zeros_i32:
525 ; SFB64-NEXT: bnez a0, .LBB10_2
526 ; SFB64-NEXT: # %bb.1:
527 ; SFB64-NEXT: addw a2, a2, a1
528 ; SFB64-NEXT: .LBB10_2:
529 ; SFB64-NEXT: mv a0, a2
532 ; VTCONDOPS64-LABEL: add_select_all_zeros_i32:
533 ; VTCONDOPS64: # %bb.0:
534 ; VTCONDOPS64-NEXT: vt.maskcn a0, a1, a0
535 ; VTCONDOPS64-NEXT: addw a0, a2, a0
536 ; VTCONDOPS64-NEXT: ret
538 ; ZICOND32-LABEL: add_select_all_zeros_i32:
540 ; ZICOND32-NEXT: czero.nez a0, a1, a0
541 ; ZICOND32-NEXT: add a0, a2, a0
544 ; ZICOND64-LABEL: add_select_all_zeros_i32:
546 ; ZICOND64-NEXT: czero.nez a0, a1, a0
547 ; ZICOND64-NEXT: addw a0, a2, a0
549 %a = select i1 %c, i32 0, i32 %x
554 define i64 @add_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
555 ; RV32I-LABEL: add_select_all_zeros_i64:
557 ; RV32I-NEXT: neg a0, a0
558 ; RV32I-NEXT: and a2, a0, a2
559 ; RV32I-NEXT: and a1, a0, a1
560 ; RV32I-NEXT: add a0, a1, a3
561 ; RV32I-NEXT: sltu a1, a0, a1
562 ; RV32I-NEXT: add a2, a2, a4
563 ; RV32I-NEXT: add a1, a2, a1
566 ; RV64I-LABEL: add_select_all_zeros_i64:
568 ; RV64I-NEXT: neg a0, a0
569 ; RV64I-NEXT: and a0, a0, a1
570 ; RV64I-NEXT: add a0, a0, a2
573 ; SFB64-LABEL: add_select_all_zeros_i64:
575 ; SFB64-NEXT: beqz a0, .LBB11_2
576 ; SFB64-NEXT: # %bb.1:
577 ; SFB64-NEXT: add a2, a2, a1
578 ; SFB64-NEXT: .LBB11_2:
579 ; SFB64-NEXT: mv a0, a2
582 ; VTCONDOPS64-LABEL: add_select_all_zeros_i64:
583 ; VTCONDOPS64: # %bb.0:
584 ; VTCONDOPS64-NEXT: vt.maskc a0, a1, a0
585 ; VTCONDOPS64-NEXT: add a0, a0, a2
586 ; VTCONDOPS64-NEXT: ret
588 ; ZICOND32-LABEL: add_select_all_zeros_i64:
590 ; ZICOND32-NEXT: czero.eqz a2, a2, a0
591 ; ZICOND32-NEXT: czero.eqz a1, a1, a0
592 ; ZICOND32-NEXT: add a0, a1, a3
593 ; ZICOND32-NEXT: sltu a1, a0, a1
594 ; ZICOND32-NEXT: add a2, a2, a4
595 ; ZICOND32-NEXT: add a1, a2, a1
598 ; ZICOND64-LABEL: add_select_all_zeros_i64:
600 ; ZICOND64-NEXT: czero.eqz a0, a1, a0
601 ; ZICOND64-NEXT: add a0, a0, a2
603 %a = select i1 %c, i64 %x, i64 0
608 define signext i32 @sub_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
609 ; RV32I-LABEL: sub_select_all_zeros_i32:
611 ; RV32I-NEXT: addi a0, a0, -1
612 ; RV32I-NEXT: and a0, a0, a1
613 ; RV32I-NEXT: sub a0, a2, a0
616 ; RV64I-LABEL: sub_select_all_zeros_i32:
618 ; RV64I-NEXT: addi a0, a0, -1
619 ; RV64I-NEXT: and a0, a0, a1
620 ; RV64I-NEXT: subw a0, a2, a0
623 ; SFB64-LABEL: sub_select_all_zeros_i32:
625 ; SFB64-NEXT: bnez a0, .LBB12_2
626 ; SFB64-NEXT: # %bb.1:
627 ; SFB64-NEXT: subw a2, a2, a1
628 ; SFB64-NEXT: .LBB12_2:
629 ; SFB64-NEXT: mv a0, a2
632 ; VTCONDOPS64-LABEL: sub_select_all_zeros_i32:
633 ; VTCONDOPS64: # %bb.0:
634 ; VTCONDOPS64-NEXT: vt.maskcn a0, a1, a0
635 ; VTCONDOPS64-NEXT: subw a0, a2, a0
636 ; VTCONDOPS64-NEXT: ret
638 ; ZICOND32-LABEL: sub_select_all_zeros_i32:
640 ; ZICOND32-NEXT: czero.nez a0, a1, a0
641 ; ZICOND32-NEXT: sub a0, a2, a0
644 ; ZICOND64-LABEL: sub_select_all_zeros_i32:
646 ; ZICOND64-NEXT: czero.nez a0, a1, a0
647 ; ZICOND64-NEXT: subw a0, a2, a0
649 %a = select i1 %c, i32 0, i32 %x
654 define i64 @sub_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
655 ; RV32I-LABEL: sub_select_all_zeros_i64:
657 ; RV32I-NEXT: neg a0, a0
658 ; RV32I-NEXT: and a2, a0, a2
659 ; RV32I-NEXT: and a0, a0, a1
660 ; RV32I-NEXT: sltu a1, a3, a0
661 ; RV32I-NEXT: sub a4, a4, a2
662 ; RV32I-NEXT: sub a1, a4, a1
663 ; RV32I-NEXT: sub a0, a3, a0
666 ; RV64I-LABEL: sub_select_all_zeros_i64:
668 ; RV64I-NEXT: neg a0, a0
669 ; RV64I-NEXT: and a0, a0, a1
670 ; RV64I-NEXT: sub a0, a2, a0
673 ; SFB64-LABEL: sub_select_all_zeros_i64:
675 ; SFB64-NEXT: beqz a0, .LBB13_2
676 ; SFB64-NEXT: # %bb.1:
677 ; SFB64-NEXT: sub a2, a2, a1
678 ; SFB64-NEXT: .LBB13_2:
679 ; SFB64-NEXT: mv a0, a2
682 ; VTCONDOPS64-LABEL: sub_select_all_zeros_i64:
683 ; VTCONDOPS64: # %bb.0:
684 ; VTCONDOPS64-NEXT: vt.maskc a0, a1, a0
685 ; VTCONDOPS64-NEXT: sub a0, a2, a0
686 ; VTCONDOPS64-NEXT: ret
688 ; ZICOND32-LABEL: sub_select_all_zeros_i64:
690 ; ZICOND32-NEXT: czero.eqz a2, a2, a0
691 ; ZICOND32-NEXT: czero.eqz a0, a1, a0
692 ; ZICOND32-NEXT: sltu a1, a3, a0
693 ; ZICOND32-NEXT: sub a4, a4, a2
694 ; ZICOND32-NEXT: sub a1, a4, a1
695 ; ZICOND32-NEXT: sub a0, a3, a0
698 ; ZICOND64-LABEL: sub_select_all_zeros_i64:
700 ; ZICOND64-NEXT: czero.eqz a0, a1, a0
701 ; ZICOND64-NEXT: sub a0, a2, a0
703 %a = select i1 %c, i64 %x, i64 0