; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
; RUN: llc -mtriple=riscv32 < %s | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 < %s | FileCheck %s -check-prefix=RV64I
; RUN: llc -mtriple=riscv64 -mattr=+xventanacondops < %s | FileCheck %s -check-prefix=RV64XVENTANACONDOPS
; RUN: llc -mtriple=riscv64 -mattr=+xtheadcondmov < %s | FileCheck %s -check-prefix=RV64XTHEADCONDMOV
; RUN: llc -mtriple=riscv32 -mattr=+zicond < %s | FileCheck %s -check-prefix=RV32ZICOND
; RUN: llc -mtriple=riscv64 -mattr=+zicond < %s | FileCheck %s -check-prefix=RV64ZICOND
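
; Check lowering of (select c, (binop x, y), x) for various binary operations.
; With Zicond, XVentanaCondOps or XTheadCondMov the select is generally folded
; away by conditionally zeroing the y operand instead of branching.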
define i32 @shl32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: shl32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a2, a2, 31
; RV32I-NEXT: srai a2, a2, 31
; RV32I-NEXT: and a1, a2, a1
; RV32I-NEXT: sll a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: shl32:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: sllw a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: shl32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: sllw a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: shl32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: sllw a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: shl32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: czero.eqz a1, a1, a2
; RV32ZICOND-NEXT: sll a0, a0, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: shl32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: sllw a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = shl i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

define i32 @ashr32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: ashr32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a2, a2, 31
; RV32I-NEXT: srai a2, a2, 31
; RV32I-NEXT: and a1, a2, a1
; RV32I-NEXT: sra a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: ashr32:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: sraw a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: ashr32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: sraw a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: ashr32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: sraw a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: ashr32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: czero.eqz a1, a1, a2
; RV32ZICOND-NEXT: sra a0, a0, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: ashr32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: sraw a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = ashr i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

define i32 @lshr32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: lshr32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a2, a2, 31
; RV32I-NEXT: srai a2, a2, 31
; RV32I-NEXT: and a1, a2, a1
; RV32I-NEXT: srl a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: lshr32:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: srlw a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: lshr32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: srlw a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: lshr32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: srlw a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: lshr32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: czero.eqz a1, a1, a2
; RV32ZICOND-NEXT: srl a0, a0, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: lshr32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: srlw a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = lshr i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

define i32 @sub32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: sub32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a2, a2, 31
; RV32I-NEXT: srai a2, a2, 31
; RV32I-NEXT: and a1, a2, a1
; RV32I-NEXT: sub a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: sub32:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: subw a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: sub32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: subw a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: sub32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: subw a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: sub32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: czero.eqz a1, a1, a2
; RV32ZICOND-NEXT: sub a0, a0, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: sub32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: subw a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = sub i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

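; For 'and', zeroing the y operand would give 0 rather than x, so the result is
; instead selected between x and (x & y) (czero.nez/vt.maskcn plus or, or a
; conditional move); plain RV32I/RV64I keep a branch.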
define i32 @and32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: and32:
; RV32I: # %bb.0:
; RV32I-NEXT: andi a2, a2, 1
; RV32I-NEXT: beqz a2, .LBB4_2
; RV32I-NEXT: # %bb.1:
; RV32I-NEXT: and a0, a0, a1
; RV32I-NEXT: .LBB4_2:
; RV32I-NEXT: ret
;
; RV64I-LABEL: and32:
; RV64I: # %bb.0:
; RV64I-NEXT: andi a2, a2, 1
; RV64I-NEXT: beqz a2, .LBB4_2
; RV64I-NEXT: # %bb.1:
; RV64I-NEXT: and a0, a0, a1
; RV64I-NEXT: .LBB4_2:
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: and32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: and a1, a0, a1
; RV64XVENTANACONDOPS-NEXT: vt.maskcn a0, a0, a2
; RV64XVENTANACONDOPS-NEXT: or a0, a1, a0
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: and32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: and a1, a0, a1
; RV64XTHEADCONDMOV-NEXT: th.mvnez a0, a1, a2
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: and32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: and a1, a0, a1
; RV32ZICOND-NEXT: czero.nez a0, a0, a2
; RV32ZICOND-NEXT: or a0, a1, a0
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: and32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: and a1, a0, a1
; RV64ZICOND-NEXT: czero.nez a0, a0, a2
; RV64ZICOND-NEXT: or a0, a1, a0
; RV64ZICOND-NEXT: ret
  %binop = and i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

define i32 @add32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: add32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a2, a2, 31
; RV32I-NEXT: srai a2, a2, 31
; RV32I-NEXT: and a1, a2, a1
; RV32I-NEXT: add a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: add32:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: addw a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: add32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: addw a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: add32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: addw a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: add32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: czero.eqz a1, a1, a2
; RV32ZICOND-NEXT: add a0, a0, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: add32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: addw a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = add i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

define i32 @or32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: or32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a2, a2, 31
; RV32I-NEXT: srai a2, a2, 31
; RV32I-NEXT: and a1, a2, a1
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: or32:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: or32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: or a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: or32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: or a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: or32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: czero.eqz a1, a1, a2
; RV32ZICOND-NEXT: or a0, a0, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: or32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: or a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = or i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

define i32 @xor32(i32 %x, i32 %y, i1 %c) {
; RV32I-LABEL: xor32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a2, a2, 31
; RV32I-NEXT: srai a2, a2, 31
; RV32I-NEXT: and a1, a2, a1
; RV32I-NEXT: xor a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: xor32:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: xor a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: xor32:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: xor a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: xor32:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: xor a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: xor32:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a2, a2, 1
; RV32ZICOND-NEXT: czero.eqz a1, a1, a2
; RV32ZICOND-NEXT: xor a0, a0, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: xor32:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: xor a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = xor i32 %x, %y
  %select_ = select i1 %c, i32 %binop, i32 %x
  ret i32 %select_
}

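; The 64-bit versions check the same fold; on RV32 the conditionally zeroed y
; operand is then fed into the usual two-register expansion.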
define i64 @shl64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: shl64:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a4, a4, 31
; RV32I-NEXT: srai a4, a4, 31
; RV32I-NEXT: and a4, a4, a2
; RV32I-NEXT: addi a3, a4, -32
; RV32I-NEXT: sll a2, a0, a4
; RV32I-NEXT: bltz a3, .LBB8_2
; RV32I-NEXT: # %bb.1:
; RV32I-NEXT: mv a1, a2
; RV32I-NEXT: j .LBB8_3
; RV32I-NEXT: .LBB8_2:
; RV32I-NEXT: sll a1, a1, a4
; RV32I-NEXT: not a4, a4
; RV32I-NEXT: srli a0, a0, 1
; RV32I-NEXT: srl a0, a0, a4
; RV32I-NEXT: or a1, a1, a0
; RV32I-NEXT: .LBB8_3:
; RV32I-NEXT: srai a0, a3, 31
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: ret
;
; RV64I-LABEL: shl64:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: sll a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: shl64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: sll a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: shl64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: sll a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: shl64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: czero.eqz a2, a2, a4
; RV32ZICOND-NEXT: sll a3, a0, a2
; RV32ZICOND-NEXT: addi a4, a2, -32
; RV32ZICOND-NEXT: slti a4, a4, 0
; RV32ZICOND-NEXT: czero.nez a5, a3, a4
; RV32ZICOND-NEXT: sll a1, a1, a2
; RV32ZICOND-NEXT: not a2, a2
; RV32ZICOND-NEXT: srli a0, a0, 1
; RV32ZICOND-NEXT: srl a0, a0, a2
; RV32ZICOND-NEXT: or a0, a1, a0
; RV32ZICOND-NEXT: czero.eqz a1, a0, a4
; RV32ZICOND-NEXT: or a1, a1, a5
; RV32ZICOND-NEXT: czero.eqz a0, a3, a4
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: shl64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: sll a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = shl i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}

define i64 @ashr64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: ashr64:
; RV32I: # %bb.0:
; RV32I-NEXT: mv a3, a0
; RV32I-NEXT: slli a4, a4, 31
; RV32I-NEXT: srai a4, a4, 31
; RV32I-NEXT: and a2, a4, a2
; RV32I-NEXT: addi a4, a2, -32
; RV32I-NEXT: sra a0, a1, a2
; RV32I-NEXT: bltz a4, .LBB9_2
; RV32I-NEXT: # %bb.1:
; RV32I-NEXT: srai a1, a1, 31
; RV32I-NEXT: .LBB9_2:
; RV32I-NEXT: srl a3, a3, a2
; RV32I-NEXT: not a2, a2
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: sll a1, a1, a2
; RV32I-NEXT: or a3, a3, a1
; RV32I-NEXT: mv a1, a0
; RV32I-NEXT: mv a0, a3
; RV32I-NEXT: ret
;
; RV64I-LABEL: ashr64:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: sra a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: ashr64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: sra a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: ashr64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: sra a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: ashr64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: czero.eqz a2, a2, a4
; RV32ZICOND-NEXT: sra a3, a1, a2
; RV32ZICOND-NEXT: addi a4, a2, -32
; RV32ZICOND-NEXT: slti a4, a4, 0
; RV32ZICOND-NEXT: czero.nez a5, a3, a4
; RV32ZICOND-NEXT: srl a0, a0, a2
; RV32ZICOND-NEXT: not a2, a2
; RV32ZICOND-NEXT: slli a6, a1, 1
; RV32ZICOND-NEXT: sll a2, a6, a2
; RV32ZICOND-NEXT: or a0, a0, a2
; RV32ZICOND-NEXT: czero.eqz a0, a0, a4
; RV32ZICOND-NEXT: or a0, a0, a5
; RV32ZICOND-NEXT: czero.eqz a2, a3, a4
; RV32ZICOND-NEXT: srai a1, a1, 31
; RV32ZICOND-NEXT: czero.nez a1, a1, a4
; RV32ZICOND-NEXT: or a1, a2, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: ashr64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: sra a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = ashr i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}

define i64 @lshr64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: lshr64:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a4, a4, 31
; RV32I-NEXT: srai a4, a4, 31
; RV32I-NEXT: and a4, a4, a2
; RV32I-NEXT: addi a3, a4, -32
; RV32I-NEXT: srl a2, a1, a4
; RV32I-NEXT: bltz a3, .LBB10_2
; RV32I-NEXT: # %bb.1:
; RV32I-NEXT: mv a0, a2
; RV32I-NEXT: j .LBB10_3
; RV32I-NEXT: .LBB10_2:
; RV32I-NEXT: srl a0, a0, a4
; RV32I-NEXT: not a4, a4
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: sll a1, a1, a4
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: .LBB10_3:
; RV32I-NEXT: srai a1, a3, 31
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: ret
;
; RV64I-LABEL: lshr64:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: srl a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: lshr64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: srl a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: lshr64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: srl a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: lshr64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: czero.eqz a2, a2, a4
; RV32ZICOND-NEXT: srl a3, a1, a2
; RV32ZICOND-NEXT: addi a4, a2, -32
; RV32ZICOND-NEXT: slti a4, a4, 0
; RV32ZICOND-NEXT: czero.nez a5, a3, a4
; RV32ZICOND-NEXT: srl a0, a0, a2
; RV32ZICOND-NEXT: not a2, a2
; RV32ZICOND-NEXT: slli a1, a1, 1
; RV32ZICOND-NEXT: sll a1, a1, a2
; RV32ZICOND-NEXT: or a0, a0, a1
; RV32ZICOND-NEXT: czero.eqz a0, a0, a4
; RV32ZICOND-NEXT: or a0, a0, a5
; RV32ZICOND-NEXT: czero.eqz a1, a3, a4
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: lshr64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: srl a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = lshr i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}

define i64 @sub64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: sub64:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a4, a4, 31
; RV32I-NEXT: srai a4, a4, 31
; RV32I-NEXT: and a2, a4, a2
; RV32I-NEXT: sltu a5, a0, a2
; RV32I-NEXT: and a3, a4, a3
; RV32I-NEXT: sub a1, a1, a3
; RV32I-NEXT: sub a1, a1, a5
; RV32I-NEXT: sub a0, a0, a2
; RV32I-NEXT: ret
;
; RV64I-LABEL: sub64:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: sub a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: sub64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: sub a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: sub64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: sub a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: sub64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: czero.eqz a2, a2, a4
; RV32ZICOND-NEXT: sltu a5, a0, a2
; RV32ZICOND-NEXT: czero.eqz a3, a3, a4
; RV32ZICOND-NEXT: sub a1, a1, a3
; RV32ZICOND-NEXT: sub a1, a1, a5
; RV32ZICOND-NEXT: sub a0, a0, a2
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: sub64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: sub a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = sub i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}

define i64 @and64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: and64:
; RV32I: # %bb.0:
; RV32I-NEXT: andi a4, a4, 1
; RV32I-NEXT: beqz a4, .LBB12_2
; RV32I-NEXT: # %bb.1:
; RV32I-NEXT: and a1, a1, a3
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: .LBB12_2:
; RV32I-NEXT: ret
;
; RV64I-LABEL: and64:
; RV64I: # %bb.0:
; RV64I-NEXT: andi a2, a2, 1
; RV64I-NEXT: beqz a2, .LBB12_2
; RV64I-NEXT: # %bb.1:
; RV64I-NEXT: and a0, a0, a1
; RV64I-NEXT: .LBB12_2:
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: and64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: and a1, a0, a1
; RV64XVENTANACONDOPS-NEXT: vt.maskcn a0, a0, a2
; RV64XVENTANACONDOPS-NEXT: or a0, a1, a0
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: and64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: and a1, a0, a1
; RV64XTHEADCONDMOV-NEXT: th.mvnez a0, a1, a2
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: and64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: and a3, a1, a3
; RV32ZICOND-NEXT: and a2, a0, a2
; RV32ZICOND-NEXT: czero.nez a0, a0, a4
; RV32ZICOND-NEXT: or a0, a2, a0
; RV32ZICOND-NEXT: czero.nez a1, a1, a4
; RV32ZICOND-NEXT: or a1, a3, a1
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: and64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: and a1, a0, a1
; RV64ZICOND-NEXT: czero.nez a0, a0, a2
; RV64ZICOND-NEXT: or a0, a1, a0
; RV64ZICOND-NEXT: ret
  %binop = and i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}

define i64 @add64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: add64:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a4, a4, 31
; RV32I-NEXT: srai a4, a4, 31
; RV32I-NEXT: and a3, a4, a3
; RV32I-NEXT: add a1, a1, a3
; RV32I-NEXT: and a2, a4, a2
; RV32I-NEXT: add a2, a0, a2
; RV32I-NEXT: sltu a0, a2, a0
; RV32I-NEXT: add a1, a1, a0
; RV32I-NEXT: mv a0, a2
; RV32I-NEXT: ret
;
; RV64I-LABEL: add64:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: add a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: add64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: add a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: add64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: add a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: add64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: czero.eqz a3, a3, a4
; RV32ZICOND-NEXT: add a1, a1, a3
; RV32ZICOND-NEXT: czero.eqz a2, a2, a4
; RV32ZICOND-NEXT: add a2, a0, a2
; RV32ZICOND-NEXT: sltu a0, a2, a0
; RV32ZICOND-NEXT: add a1, a1, a0
; RV32ZICOND-NEXT: mv a0, a2
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: add64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: add a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = add i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}

define i64 @or64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: or64:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a4, a4, 31
; RV32I-NEXT: srai a4, a4, 31
; RV32I-NEXT: and a2, a4, a2
; RV32I-NEXT: or a0, a0, a2
; RV32I-NEXT: and a3, a4, a3
; RV32I-NEXT: or a1, a1, a3
; RV32I-NEXT: ret
;
; RV64I-LABEL: or64:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: or64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: or a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: or64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: or a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: or64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: czero.eqz a2, a2, a4
; RV32ZICOND-NEXT: or a0, a0, a2
; RV32ZICOND-NEXT: czero.eqz a2, a3, a4
; RV32ZICOND-NEXT: or a1, a1, a2
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: or64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: or a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = or i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}

define i64 @xor64(i64 %x, i64 %y, i1 %c) {
; RV32I-LABEL: xor64:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a4, a4, 31
; RV32I-NEXT: srai a4, a4, 31
; RV32I-NEXT: and a2, a4, a2
; RV32I-NEXT: xor a0, a0, a2
; RV32I-NEXT: and a3, a4, a3
; RV32I-NEXT: xor a1, a1, a3
; RV32I-NEXT: ret
;
; RV64I-LABEL: xor64:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a2, a2, 63
; RV64I-NEXT: srai a2, a2, 63
; RV64I-NEXT: and a1, a2, a1
; RV64I-NEXT: xor a0, a0, a1
; RV64I-NEXT: ret
;
; RV64XVENTANACONDOPS-LABEL: xor64:
; RV64XVENTANACONDOPS: # %bb.0:
; RV64XVENTANACONDOPS-NEXT: andi a2, a2, 1
; RV64XVENTANACONDOPS-NEXT: vt.maskc a1, a1, a2
; RV64XVENTANACONDOPS-NEXT: xor a0, a0, a1
; RV64XVENTANACONDOPS-NEXT: ret
;
; RV64XTHEADCONDMOV-LABEL: xor64:
; RV64XTHEADCONDMOV: # %bb.0:
; RV64XTHEADCONDMOV-NEXT: andi a2, a2, 1
; RV64XTHEADCONDMOV-NEXT: th.mveqz a1, zero, a2
; RV64XTHEADCONDMOV-NEXT: xor a0, a0, a1
; RV64XTHEADCONDMOV-NEXT: ret
;
; RV32ZICOND-LABEL: xor64:
; RV32ZICOND: # %bb.0:
; RV32ZICOND-NEXT: andi a4, a4, 1
; RV32ZICOND-NEXT: czero.eqz a2, a2, a4
; RV32ZICOND-NEXT: xor a0, a0, a2
; RV32ZICOND-NEXT: czero.eqz a2, a3, a4
; RV32ZICOND-NEXT: xor a1, a1, a2
; RV32ZICOND-NEXT: ret
;
; RV64ZICOND-LABEL: xor64:
; RV64ZICOND: # %bb.0:
; RV64ZICOND-NEXT: andi a2, a2, 1
; RV64ZICOND-NEXT: czero.eqz a1, a1, a2
; RV64ZICOND-NEXT: xor a0, a0, a1
; RV64ZICOND-NEXT: ret
  %binop = xor i64 %x, %y
  %select_ = select i1 %c, i64 %binop, i64 %x
  ret i64 %select_
}