; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV32I
; RUN: llc -mtriple=riscv32 -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV32ZBB
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV64I
; RUN: llc -mtriple=riscv64 -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV64ZBB

declare i32 @llvm.abs.i32(i32, i1 immarg)
declare i64 @llvm.abs.i64(i64, i1 immarg)

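; With Zbb, -abs(x) should select to neg+min (neg+max for the inverted form);
; without Zbb, the srai/xor/sub expansion is used instead.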
define i32 @neg_abs32(i32 %x) {
; RV32I-LABEL: neg_abs32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a1, a0, 31
; RV32I-NEXT:    xor a0, a0, a1
; RV32I-NEXT:    sub a0, a1, a0
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: neg_abs32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    min a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: neg_abs32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sraiw a1, a0, 31
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    subw a0, a1, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: neg_abs32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sraiw a1, a0, 31
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    subw a0, a1, a0
; RV64ZBB-NEXT:    ret
  %abs = tail call i32 @llvm.abs.i32(i32 %x, i1 true)
  %neg = sub nsw i32 0, %abs
  ret i32 %neg
}

define i32 @select_neg_abs32(i32 %x) {
; RV32I-LABEL: select_neg_abs32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a1, a0, 31
; RV32I-NEXT:    xor a0, a0, a1
; RV32I-NEXT:    sub a0, a1, a0
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: select_neg_abs32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    min a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: select_neg_abs32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sraiw a1, a0, 31
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    subw a0, a1, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: select_neg_abs32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sraiw a1, a0, 31
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    subw a0, a1, a0
; RV64ZBB-NEXT:    ret
  %1 = icmp slt i32 %x, 0
  %2 = sub nsw i32 0, %x
  %3 = select i1 %1, i32 %x, i32 %2
  ret i32 %3
}

define i64 @neg_abs64(i64 %x) {
; RV32I-LABEL: neg_abs64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a2, a1, 31
; RV32I-NEXT:    xor a0, a0, a2
; RV32I-NEXT:    xor a1, a1, a2
; RV32I-NEXT:    sltu a3, a2, a0
; RV32I-NEXT:    sub a1, a2, a1
; RV32I-NEXT:    sub a1, a1, a3
; RV32I-NEXT:    sub a0, a2, a0
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: neg_abs64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srai a2, a1, 31
; RV32ZBB-NEXT:    xor a0, a0, a2
; RV32ZBB-NEXT:    xor a1, a1, a2
; RV32ZBB-NEXT:    sltu a3, a2, a0
; RV32ZBB-NEXT:    sub a1, a2, a1
; RV32ZBB-NEXT:    sub a1, a1, a3
; RV32ZBB-NEXT:    sub a0, a2, a0
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: neg_abs64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srai a1, a0, 63
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    sub a0, a1, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: neg_abs64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    min a0, a0, a1
; RV64ZBB-NEXT:    ret
  %abs = tail call i64 @llvm.abs.i64(i64 %x, i1 true)
  %neg = sub nsw i64 0, %abs
  ret i64 %neg
}

define i64 @select_neg_abs64(i64 %x) {
; RV32I-LABEL: select_neg_abs64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a2, a1, 31
; RV32I-NEXT:    xor a0, a0, a2
; RV32I-NEXT:    xor a1, a1, a2
; RV32I-NEXT:    sltu a3, a2, a0
; RV32I-NEXT:    sub a1, a2, a1
; RV32I-NEXT:    sub a1, a1, a3
; RV32I-NEXT:    sub a0, a2, a0
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: select_neg_abs64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srai a2, a1, 31
; RV32ZBB-NEXT:    xor a0, a0, a2
; RV32ZBB-NEXT:    xor a1, a1, a2
; RV32ZBB-NEXT:    sltu a3, a2, a0
; RV32ZBB-NEXT:    sub a1, a2, a1
; RV32ZBB-NEXT:    sub a1, a1, a3
; RV32ZBB-NEXT:    sub a0, a2, a0
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: select_neg_abs64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srai a1, a0, 63
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    sub a0, a1, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: select_neg_abs64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    min a0, a0, a1
; RV64ZBB-NEXT:    ret
  %1 = icmp slt i64 %x, 0
  %2 = sub nsw i64 0, %x
  %3 = select i1 %1, i64 %x, i64 %2
  ret i64 %3
}

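; When abs(x) itself has a second use (it is stored to %y), the abs value must
; be materialized (max with Zbb) and then separately negated.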
define i32 @neg_abs32_multiuse(i32 %x, ptr %y) {
; RV32I-LABEL: neg_abs32_multiuse:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a2, a0, 31
; RV32I-NEXT:    xor a0, a0, a2
; RV32I-NEXT:    sub a2, a0, a2
; RV32I-NEXT:    neg a0, a2
; RV32I-NEXT:    sw a2, 0(a1)
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: neg_abs32_multiuse:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a2, a0
; RV32ZBB-NEXT:    max a2, a0, a2
; RV32ZBB-NEXT:    neg a0, a2
; RV32ZBB-NEXT:    sw a2, 0(a1)
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: neg_abs32_multiuse:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sraiw a2, a0, 31
; RV64I-NEXT:    xor a0, a0, a2
; RV64I-NEXT:    subw a2, a0, a2
; RV64I-NEXT:    negw a0, a2
; RV64I-NEXT:    sw a2, 0(a1)
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: neg_abs32_multiuse:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a0, a0
; RV64ZBB-NEXT:    negw a2, a0
; RV64ZBB-NEXT:    max a2, a0, a2
; RV64ZBB-NEXT:    negw a0, a2
; RV64ZBB-NEXT:    sw a2, 0(a1)
; RV64ZBB-NEXT:    ret
  %abs = tail call i32 @llvm.abs.i32(i32 %x, i1 true)
  store i32 %abs, ptr %y
  %neg = sub nsw i32 0, %abs
  ret i32 %neg
}

define i64 @neg_abs64_multiuse(i64 %x, ptr %y) {
; RV32I-LABEL: neg_abs64_multiuse:
; RV32I:       # %bb.0:
; RV32I-NEXT:    bgez a1, .LBB5_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    snez a3, a0
; RV32I-NEXT:    neg a1, a1
; RV32I-NEXT:    sub a1, a1, a3
; RV32I-NEXT:    neg a0, a0
; RV32I-NEXT:  .LBB5_2:
; RV32I-NEXT:    snez a3, a0
; RV32I-NEXT:    neg a4, a1
; RV32I-NEXT:    sub a3, a4, a3
; RV32I-NEXT:    neg a4, a0
; RV32I-NEXT:    sw a0, 0(a2)
; RV32I-NEXT:    sw a1, 4(a2)
; RV32I-NEXT:    mv a0, a4
; RV32I-NEXT:    mv a1, a3
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: neg_abs64_multiuse:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    bgez a1, .LBB5_2
; RV32ZBB-NEXT:  # %bb.1:
; RV32ZBB-NEXT:    snez a3, a0
; RV32ZBB-NEXT:    neg a1, a1
; RV32ZBB-NEXT:    sub a1, a1, a3
; RV32ZBB-NEXT:    neg a0, a0
; RV32ZBB-NEXT:  .LBB5_2:
; RV32ZBB-NEXT:    snez a3, a0
; RV32ZBB-NEXT:    neg a4, a1
; RV32ZBB-NEXT:    sub a3, a4, a3
; RV32ZBB-NEXT:    neg a4, a0
; RV32ZBB-NEXT:    sw a0, 0(a2)
; RV32ZBB-NEXT:    sw a1, 4(a2)
; RV32ZBB-NEXT:    mv a0, a4
; RV32ZBB-NEXT:    mv a1, a3
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: neg_abs64_multiuse:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srai a2, a0, 63
; RV64I-NEXT:    xor a0, a0, a2
; RV64I-NEXT:    sub a2, a0, a2
; RV64I-NEXT:    neg a0, a2
; RV64I-NEXT:    sd a2, 0(a1)
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: neg_abs64_multiuse:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a2, a0
; RV64ZBB-NEXT:    max a2, a0, a2
; RV64ZBB-NEXT:    neg a0, a2
; RV64ZBB-NEXT:    sd a2, 0(a1)
; RV64ZBB-NEXT:    ret
  %abs = tail call i64 @llvm.abs.i64(i64 %x, i1 true)
  store i64 %abs, ptr %y
  %neg = sub nsw i64 0, %abs
  ret i64 %neg
}

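; The remaining tests use the manually expanded form of neg-abs: %n = 0 - %x
; followed by smax/umax (or smin/umin for the inverted form) instead of the
; llvm.abs intrinsic.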
define i32 @expanded_neg_abs32(i32 %x) {
; RV32I-LABEL: expanded_neg_abs32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    blt a0, a1, .LBB6_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:  .LBB6_2:
; RV32I-NEXT:    neg a0, a1
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_abs32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    min a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_abs32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    blt a1, a0, .LBB6_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a0, a1
; RV64I-NEXT:  .LBB6_2:
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_abs32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    max a0, a0, a1
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    ret
  %n = sub i32 0, %x
  %t = call i32 @llvm.smax.i32(i32 %n, i32 %x)
  %r = sub i32 0, %t
  ret i32 %r
}

define i32 @expanded_neg_abs32_unsigned(i32 %x) {
; RV32I-LABEL: expanded_neg_abs32_unsigned:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    bltu a0, a1, .LBB7_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:  .LBB7_2:
; RV32I-NEXT:    neg a0, a1
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_abs32_unsigned:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    minu a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_abs32_unsigned:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    bltu a1, a0, .LBB7_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a0, a1
; RV64I-NEXT:  .LBB7_2:
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_abs32_unsigned:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    maxu a0, a0, a1
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    ret
  %n = sub i32 0, %x
  %t = call i32 @llvm.umax.i32(i32 %n, i32 %x)
  %r = sub i32 0, %t
  ret i32 %r
}

define i64 @expanded_neg_abs64(i64 %x) {
; RV32I-LABEL: expanded_neg_abs64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    snez a2, a0
; RV32I-NEXT:    neg a3, a1
; RV32I-NEXT:    sub a2, a3, a2
; RV32I-NEXT:    neg a3, a0
; RV32I-NEXT:    beq a2, a1, .LBB8_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    slt a4, a1, a2
; RV32I-NEXT:    beqz a4, .LBB8_3
; RV32I-NEXT:    j .LBB8_4
; RV32I-NEXT:  .LBB8_2:
; RV32I-NEXT:    sltu a4, a0, a3
; RV32I-NEXT:    bnez a4, .LBB8_4
; RV32I-NEXT:  .LBB8_3:
; RV32I-NEXT:    mv a2, a1
; RV32I-NEXT:    mv a3, a0
; RV32I-NEXT:  .LBB8_4:
; RV32I-NEXT:    snez a0, a3
; RV32I-NEXT:    add a0, a2, a0
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    neg a0, a3
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_abs64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    snez a2, a0
; RV32ZBB-NEXT:    neg a3, a1
; RV32ZBB-NEXT:    sub a2, a3, a2
; RV32ZBB-NEXT:    neg a3, a0
; RV32ZBB-NEXT:    beq a2, a1, .LBB8_2
; RV32ZBB-NEXT:  # %bb.1:
; RV32ZBB-NEXT:    slt a4, a1, a2
; RV32ZBB-NEXT:    beqz a4, .LBB8_3
; RV32ZBB-NEXT:    j .LBB8_4
; RV32ZBB-NEXT:  .LBB8_2:
; RV32ZBB-NEXT:    sltu a4, a0, a3
; RV32ZBB-NEXT:    bnez a4, .LBB8_4
; RV32ZBB-NEXT:  .LBB8_3:
; RV32ZBB-NEXT:    mv a2, a1
; RV32ZBB-NEXT:    mv a3, a0
; RV32ZBB-NEXT:  .LBB8_4:
; RV32ZBB-NEXT:    snez a0, a3
; RV32ZBB-NEXT:    add a0, a2, a0
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    neg a0, a3
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_abs64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a1, a0
; RV64I-NEXT:    blt a0, a1, .LBB8_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a1, a0
; RV64I-NEXT:  .LBB8_2:
; RV64I-NEXT:    neg a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_abs64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    min a0, a0, a1
; RV64ZBB-NEXT:    ret
  %n = sub i64 0, %x
  %t = call i64 @llvm.smax.i64(i64 %n, i64 %x)
  %r = sub i64 0, %t
  ret i64 %r
}

define i64 @expanded_neg_abs64_unsigned(i64 %x) {
; RV32I-LABEL: expanded_neg_abs64_unsigned:
; RV32I:       # %bb.0:
; RV32I-NEXT:    snez a2, a0
; RV32I-NEXT:    neg a3, a1
; RV32I-NEXT:    sub a2, a3, a2
; RV32I-NEXT:    neg a3, a0
; RV32I-NEXT:    beq a2, a1, .LBB9_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    sltu a4, a1, a2
; RV32I-NEXT:    beqz a4, .LBB9_3
; RV32I-NEXT:    j .LBB9_4
; RV32I-NEXT:  .LBB9_2:
; RV32I-NEXT:    sltu a4, a0, a3
; RV32I-NEXT:    bnez a4, .LBB9_4
; RV32I-NEXT:  .LBB9_3:
; RV32I-NEXT:    mv a2, a1
; RV32I-NEXT:    mv a3, a0
; RV32I-NEXT:  .LBB9_4:
; RV32I-NEXT:    snez a0, a3
; RV32I-NEXT:    add a0, a2, a0
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    neg a0, a3
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_abs64_unsigned:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    snez a2, a0
; RV32ZBB-NEXT:    neg a3, a1
; RV32ZBB-NEXT:    sub a2, a3, a2
; RV32ZBB-NEXT:    neg a3, a0
; RV32ZBB-NEXT:    beq a2, a1, .LBB9_2
; RV32ZBB-NEXT:  # %bb.1:
; RV32ZBB-NEXT:    sltu a4, a1, a2
; RV32ZBB-NEXT:    beqz a4, .LBB9_3
; RV32ZBB-NEXT:    j .LBB9_4
; RV32ZBB-NEXT:  .LBB9_2:
; RV32ZBB-NEXT:    sltu a4, a0, a3
; RV32ZBB-NEXT:    bnez a4, .LBB9_4
; RV32ZBB-NEXT:  .LBB9_3:
; RV32ZBB-NEXT:    mv a2, a1
; RV32ZBB-NEXT:    mv a3, a0
; RV32ZBB-NEXT:  .LBB9_4:
; RV32ZBB-NEXT:    snez a0, a3
; RV32ZBB-NEXT:    add a0, a2, a0
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    neg a0, a3
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_abs64_unsigned:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a1, a0
; RV64I-NEXT:    bltu a0, a1, .LBB9_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a1, a0
; RV64I-NEXT:  .LBB9_2:
; RV64I-NEXT:    neg a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_abs64_unsigned:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    minu a0, a0, a1
; RV64ZBB-NEXT:    ret
  %n = sub i64 0, %x
  %t = call i64 @llvm.umax.i64(i64 %n, i64 %x)
  %r = sub i64 0, %t
  ret i64 %r
}

define i32 @expanded_neg_inv_abs32(i32 %x) {
; RV32I-LABEL: expanded_neg_inv_abs32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    blt a1, a0, .LBB10_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:  .LBB10_2:
; RV32I-NEXT:    neg a0, a1
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_inv_abs32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    max a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_inv_abs32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    blt a0, a1, .LBB10_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a0, a1
; RV64I-NEXT:  .LBB10_2:
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_inv_abs32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    min a0, a0, a1
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    ret
  %n = sub i32 0, %x
  %t = call i32 @llvm.smin.i32(i32 %n, i32 %x)
  %r = sub i32 0, %t
  ret i32 %r
}

define i32 @expanded_neg_inv_abs32_unsigned(i32 %x) {
; RV32I-LABEL: expanded_neg_inv_abs32_unsigned:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    bltu a1, a0, .LBB11_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:  .LBB11_2:
; RV32I-NEXT:    neg a0, a1
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_inv_abs32_unsigned:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    maxu a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_inv_abs32_unsigned:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    bltu a0, a1, .LBB11_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a0, a1
; RV64I-NEXT:  .LBB11_2:
; RV64I-NEXT:    negw a0, a0
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_inv_abs32_unsigned:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    minu a0, a0, a1
; RV64ZBB-NEXT:    negw a0, a0
; RV64ZBB-NEXT:    ret
  %n = sub i32 0, %x
  %t = call i32 @llvm.umin.i32(i32 %n, i32 %x)
  %r = sub i32 0, %t
  ret i32 %r
}

define i64 @expanded_neg_inv_abs64(i64 %x) {
; RV32I-LABEL: expanded_neg_inv_abs64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    snez a2, a0
; RV32I-NEXT:    neg a3, a1
; RV32I-NEXT:    sub a2, a3, a2
; RV32I-NEXT:    neg a3, a0
; RV32I-NEXT:    beq a2, a1, .LBB12_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    slt a4, a2, a1
; RV32I-NEXT:    beqz a4, .LBB12_3
; RV32I-NEXT:    j .LBB12_4
; RV32I-NEXT:  .LBB12_2:
; RV32I-NEXT:    sltu a4, a3, a0
; RV32I-NEXT:    bnez a4, .LBB12_4
; RV32I-NEXT:  .LBB12_3:
; RV32I-NEXT:    mv a2, a1
; RV32I-NEXT:    mv a3, a0
; RV32I-NEXT:  .LBB12_4:
; RV32I-NEXT:    snez a0, a3
; RV32I-NEXT:    add a0, a2, a0
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    neg a0, a3
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_inv_abs64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    snez a2, a0
; RV32ZBB-NEXT:    neg a3, a1
; RV32ZBB-NEXT:    sub a2, a3, a2
; RV32ZBB-NEXT:    neg a3, a0
; RV32ZBB-NEXT:    beq a2, a1, .LBB12_2
; RV32ZBB-NEXT:  # %bb.1:
; RV32ZBB-NEXT:    slt a4, a2, a1
; RV32ZBB-NEXT:    beqz a4, .LBB12_3
; RV32ZBB-NEXT:    j .LBB12_4
; RV32ZBB-NEXT:  .LBB12_2:
; RV32ZBB-NEXT:    sltu a4, a3, a0
; RV32ZBB-NEXT:    bnez a4, .LBB12_4
; RV32ZBB-NEXT:  .LBB12_3:
; RV32ZBB-NEXT:    mv a2, a1
; RV32ZBB-NEXT:    mv a3, a0
; RV32ZBB-NEXT:  .LBB12_4:
; RV32ZBB-NEXT:    snez a0, a3
; RV32ZBB-NEXT:    add a0, a2, a0
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    neg a0, a3
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_inv_abs64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a1, a0
; RV64I-NEXT:    blt a1, a0, .LBB12_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a1, a0
; RV64I-NEXT:  .LBB12_2:
; RV64I-NEXT:    neg a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_inv_abs64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    max a0, a0, a1
; RV64ZBB-NEXT:    ret
  %n = sub i64 0, %x
  %t = call i64 @llvm.smin.i64(i64 %n, i64 %x)
  %r = sub i64 0, %t
  ret i64 %r
}

define i64 @expanded_neg_inv_abs64_unsigned(i64 %x) {
; RV32I-LABEL: expanded_neg_inv_abs64_unsigned:
; RV32I:       # %bb.0:
; RV32I-NEXT:    snez a2, a0
; RV32I-NEXT:    neg a3, a1
; RV32I-NEXT:    sub a2, a3, a2
; RV32I-NEXT:    neg a3, a0
; RV32I-NEXT:    beq a2, a1, .LBB13_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    sltu a4, a2, a1
; RV32I-NEXT:    beqz a4, .LBB13_3
; RV32I-NEXT:    j .LBB13_4
; RV32I-NEXT:  .LBB13_2:
; RV32I-NEXT:    sltu a4, a3, a0
; RV32I-NEXT:    bnez a4, .LBB13_4
; RV32I-NEXT:  .LBB13_3:
; RV32I-NEXT:    mv a2, a1
; RV32I-NEXT:    mv a3, a0
; RV32I-NEXT:  .LBB13_4:
; RV32I-NEXT:    snez a0, a3
; RV32I-NEXT:    add a0, a2, a0
; RV32I-NEXT:    neg a1, a0
; RV32I-NEXT:    neg a0, a3
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: expanded_neg_inv_abs64_unsigned:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    snez a2, a0
; RV32ZBB-NEXT:    neg a3, a1
; RV32ZBB-NEXT:    sub a2, a3, a2
; RV32ZBB-NEXT:    neg a3, a0
; RV32ZBB-NEXT:    beq a2, a1, .LBB13_2
; RV32ZBB-NEXT:  # %bb.1:
; RV32ZBB-NEXT:    sltu a4, a2, a1
; RV32ZBB-NEXT:    beqz a4, .LBB13_3
; RV32ZBB-NEXT:    j .LBB13_4
; RV32ZBB-NEXT:  .LBB13_2:
; RV32ZBB-NEXT:    sltu a4, a3, a0
; RV32ZBB-NEXT:    bnez a4, .LBB13_4
; RV32ZBB-NEXT:  .LBB13_3:
; RV32ZBB-NEXT:    mv a2, a1
; RV32ZBB-NEXT:    mv a3, a0
; RV32ZBB-NEXT:  .LBB13_4:
; RV32ZBB-NEXT:    snez a0, a3
; RV32ZBB-NEXT:    add a0, a2, a0
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    neg a0, a3
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: expanded_neg_inv_abs64_unsigned:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a1, a0
; RV64I-NEXT:    bltu a1, a0, .LBB13_2
; RV64I-NEXT:  # %bb.1:
; RV64I-NEXT:    mv a1, a0
; RV64I-NEXT:  .LBB13_2:
; RV64I-NEXT:    neg a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: expanded_neg_inv_abs64_unsigned:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    maxu a0, a0, a1
; RV64ZBB-NEXT:    ret
  %n = sub i64 0, %x
  %t = call i64 @llvm.umin.i64(i64 %n, i64 %x)
  %r = sub i64 0, %t
  ret i64 %r
}