1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc --mtriple=loongarch32 -mattr=+d < %s | FileCheck %s --check-prefix=LA32
3 ; RUN: llc --mtriple=loongarch64 -mattr=+d < %s | FileCheck %s --check-prefix=LA64
5 define signext i32 @rotl_32(i32 signext %x, i32 signext %y) nounwind {
8 ; LA32-NEXT: ori $a2, $zero, 32
9 ; LA32-NEXT: sub.w $a1, $a2, $a1
10 ; LA32-NEXT: rotr.w $a0, $a0, $a1
13 ; LA64-LABEL: rotl_32:
15 ; LA64-NEXT: ori $a2, $zero, 32
16 ; LA64-NEXT: sub.d $a1, $a2, $a1
17 ; LA64-NEXT: rotr.w $a0, $a0, $a1
26 define signext i32 @rotr_32(i32 signext %x, i32 signext %y) nounwind {
27 ; LA32-LABEL: rotr_32:
29 ; LA32-NEXT: rotr.w $a0, $a0, $a1
32 ; LA64-LABEL: rotr_32:
34 ; LA64-NEXT: rotr.w $a0, $a0, $a1
43 define i64 @rotl_64(i64 %x, i64 %y) nounwind {
44 ; LA32-LABEL: rotl_64:
46 ; LA32-NEXT: sll.w $a3, $a1, $a2
47 ; LA32-NEXT: xori $a4, $a2, 31
48 ; LA32-NEXT: srli.w $a5, $a0, 1
49 ; LA32-NEXT: srl.w $a4, $a5, $a4
50 ; LA32-NEXT: or $a3, $a3, $a4
51 ; LA32-NEXT: addi.w $a4, $a2, -32
52 ; LA32-NEXT: slti $a5, $a4, 0
53 ; LA32-NEXT: maskeqz $a3, $a3, $a5
54 ; LA32-NEXT: sll.w $a6, $a0, $a4
55 ; LA32-NEXT: masknez $a5, $a6, $a5
56 ; LA32-NEXT: or $a3, $a3, $a5
57 ; LA32-NEXT: sll.w $a5, $a0, $a2
58 ; LA32-NEXT: srai.w $a4, $a4, 31
59 ; LA32-NEXT: and $a4, $a4, $a5
60 ; LA32-NEXT: sub.w $a5, $zero, $a2
61 ; LA32-NEXT: srl.w $a6, $a1, $a5
62 ; LA32-NEXT: ori $a7, $zero, 32
63 ; LA32-NEXT: sub.w $a7, $a7, $a2
64 ; LA32-NEXT: slti $t0, $a7, 0
65 ; LA32-NEXT: masknez $t1, $a6, $t0
66 ; LA32-NEXT: srl.w $a0, $a0, $a5
67 ; LA32-NEXT: ori $a5, $zero, 64
68 ; LA32-NEXT: sub.w $a2, $a5, $a2
69 ; LA32-NEXT: xori $a2, $a2, 31
70 ; LA32-NEXT: slli.w $a1, $a1, 1
71 ; LA32-NEXT: sll.w $a1, $a1, $a2
72 ; LA32-NEXT: or $a0, $a0, $a1
73 ; LA32-NEXT: maskeqz $a0, $a0, $t0
74 ; LA32-NEXT: or $a0, $a0, $t1
75 ; LA32-NEXT: srai.w $a1, $a7, 31
76 ; LA32-NEXT: and $a1, $a1, $a6
77 ; LA32-NEXT: or $a1, $a3, $a1
78 ; LA32-NEXT: or $a0, $a4, $a0
81 ; LA64-LABEL: rotl_64:
83 ; LA64-NEXT: ori $a2, $zero, 64
84 ; LA64-NEXT: sub.d $a1, $a2, $a1
85 ; LA64-NEXT: rotr.d $a0, $a0, $a1
94 define i64 @rotr_64(i64 %x, i64 %y) nounwind {
95 ; LA32-LABEL: rotr_64:
97 ; LA32-NEXT: srl.w $a3, $a0, $a2
98 ; LA32-NEXT: xori $a4, $a2, 31
99 ; LA32-NEXT: slli.w $a5, $a1, 1
100 ; LA32-NEXT: sll.w $a4, $a5, $a4
101 ; LA32-NEXT: or $a3, $a3, $a4
102 ; LA32-NEXT: addi.w $a4, $a2, -32
103 ; LA32-NEXT: slti $a5, $a4, 0
104 ; LA32-NEXT: maskeqz $a3, $a3, $a5
105 ; LA32-NEXT: srl.w $a6, $a1, $a4
106 ; LA32-NEXT: masknez $a5, $a6, $a5
107 ; LA32-NEXT: or $a3, $a3, $a5
108 ; LA32-NEXT: srl.w $a5, $a1, $a2
109 ; LA32-NEXT: srai.w $a4, $a4, 31
110 ; LA32-NEXT: and $a4, $a4, $a5
111 ; LA32-NEXT: sub.w $a5, $zero, $a2
112 ; LA32-NEXT: sll.w $a6, $a0, $a5
113 ; LA32-NEXT: ori $a7, $zero, 32
114 ; LA32-NEXT: sub.w $a7, $a7, $a2
115 ; LA32-NEXT: slti $t0, $a7, 0
116 ; LA32-NEXT: masknez $t1, $a6, $t0
117 ; LA32-NEXT: sll.w $a1, $a1, $a5
118 ; LA32-NEXT: ori $a5, $zero, 64
119 ; LA32-NEXT: sub.w $a2, $a5, $a2
120 ; LA32-NEXT: xori $a2, $a2, 31
121 ; LA32-NEXT: srli.w $a0, $a0, 1
122 ; LA32-NEXT: srl.w $a0, $a0, $a2
123 ; LA32-NEXT: or $a0, $a1, $a0
124 ; LA32-NEXT: maskeqz $a0, $a0, $t0
125 ; LA32-NEXT: or $a1, $a0, $t1
126 ; LA32-NEXT: srai.w $a0, $a7, 31
127 ; LA32-NEXT: and $a0, $a0, $a6
128 ; LA32-NEXT: or $a0, $a3, $a0
129 ; LA32-NEXT: or $a1, $a4, $a1
132 ; LA64-LABEL: rotr_64:
134 ; LA64-NEXT: rotr.d $a0, $a0, $a1
143 define signext i32 @rotl_32_mask(i32 signext %x, i32 signext %y) nounwind {
144 ; LA32-LABEL: rotl_32_mask:
146 ; LA32-NEXT: sub.w $a1, $zero, $a1
147 ; LA32-NEXT: rotr.w $a0, $a0, $a1
150 ; LA64-LABEL: rotl_32_mask:
152 ; LA64-NEXT: ori $a2, $zero, 32
153 ; LA64-NEXT: sub.d $a1, $a2, $a1
154 ; LA64-NEXT: rotr.w $a0, $a0, $a1
157 %and = and i32 %z, 31
159 %c = lshr i32 %x, %and
164 define signext i32 @rotl_32_mask_and_63_and_31(i32 signext %x, i32 signext %y) nounwind {
165 ; LA32-LABEL: rotl_32_mask_and_63_and_31:
167 ; LA32-NEXT: sub.w $a1, $zero, $a1
168 ; LA32-NEXT: rotr.w $a0, $a0, $a1
171 ; LA64-LABEL: rotl_32_mask_and_63_and_31:
173 ; LA64-NEXT: ori $a2, $zero, 32
174 ; LA64-NEXT: sub.d $a1, $a2, $a1
175 ; LA64-NEXT: rotr.w $a0, $a0, $a1
186 define signext i32 @rotl_32_mask_or_64_or_32(i32 signext %x, i32 signext %y) nounwind {
187 ; LA32-LABEL: rotl_32_mask_or_64_or_32:
189 ; LA32-NEXT: sub.w $a1, $zero, $a1
190 ; LA32-NEXT: rotr.w $a0, $a0, $a1
193 ; LA64-LABEL: rotl_32_mask_or_64_or_32:
195 ; LA64-NEXT: ori $a2, $zero, 32
196 ; LA64-NEXT: sub.d $a1, $a2, $a1
197 ; LA64-NEXT: rotr.w $a0, $a0, $a1
208 define signext i32 @rotr_32_mask(i32 signext %x, i32 signext %y) nounwind {
209 ; LA32-LABEL: rotr_32_mask:
211 ; LA32-NEXT: rotr.w $a0, $a0, $a1
214 ; LA64-LABEL: rotr_32_mask:
216 ; LA64-NEXT: rotr.w $a0, $a0, $a1
219 %and = and i32 %z, 31
221 %c = shl i32 %x, %and
226 define signext i32 @rotr_32_mask_and_63_and_31(i32 signext %x, i32 signext %y) nounwind {
227 ; LA32-LABEL: rotr_32_mask_and_63_and_31:
229 ; LA32-NEXT: rotr.w $a0, $a0, $a1
232 ; LA64-LABEL: rotr_32_mask_and_63_and_31:
234 ; LA64-NEXT: rotr.w $a0, $a0, $a1
245 define signext i32 @rotr_32_mask_or_64_or_32(i32 signext %x, i32 signext %y) nounwind {
246 ; LA32-LABEL: rotr_32_mask_or_64_or_32:
248 ; LA32-NEXT: rotr.w $a0, $a0, $a1
251 ; LA64-LABEL: rotr_32_mask_or_64_or_32:
253 ; LA64-NEXT: rotr.w $a0, $a0, $a1
264 define i64 @rotl_64_mask(i64 %x, i64 %y) nounwind {
265 ; LA32-LABEL: rotl_64_mask:
267 ; LA32-NEXT: sll.w $a3, $a1, $a2
268 ; LA32-NEXT: xori $a4, $a2, 31
269 ; LA32-NEXT: srli.w $a5, $a0, 1
270 ; LA32-NEXT: srl.w $a4, $a5, $a4
271 ; LA32-NEXT: or $a3, $a3, $a4
272 ; LA32-NEXT: addi.w $a4, $a2, -32
273 ; LA32-NEXT: slti $a5, $a4, 0
274 ; LA32-NEXT: maskeqz $a3, $a3, $a5
275 ; LA32-NEXT: sll.w $a6, $a0, $a4
276 ; LA32-NEXT: masknez $a5, $a6, $a5
277 ; LA32-NEXT: or $a3, $a3, $a5
278 ; LA32-NEXT: sll.w $a5, $a0, $a2
279 ; LA32-NEXT: srai.w $a4, $a4, 31
280 ; LA32-NEXT: and $a4, $a4, $a5
281 ; LA32-NEXT: sub.w $a2, $zero, $a2
282 ; LA32-NEXT: andi $a5, $a2, 63
283 ; LA32-NEXT: addi.w $a6, $a5, -32
284 ; LA32-NEXT: srl.w $a7, $a1, $a6
285 ; LA32-NEXT: slti $t0, $a6, 0
286 ; LA32-NEXT: masknez $a7, $a7, $t0
287 ; LA32-NEXT: srl.w $a0, $a0, $a2
288 ; LA32-NEXT: xori $a5, $a5, 31
289 ; LA32-NEXT: slli.w $t1, $a1, 1
290 ; LA32-NEXT: sll.w $a5, $t1, $a5
291 ; LA32-NEXT: or $a0, $a0, $a5
292 ; LA32-NEXT: maskeqz $a0, $a0, $t0
293 ; LA32-NEXT: or $a0, $a0, $a7
294 ; LA32-NEXT: srl.w $a1, $a1, $a2
295 ; LA32-NEXT: srai.w $a2, $a6, 31
296 ; LA32-NEXT: and $a1, $a2, $a1
297 ; LA32-NEXT: or $a1, $a3, $a1
298 ; LA32-NEXT: or $a0, $a4, $a0
301 ; LA64-LABEL: rotl_64_mask:
303 ; LA64-NEXT: sub.d $a1, $zero, $a1
304 ; LA64-NEXT: rotr.d $a0, $a0, $a1
307 %and = and i64 %z, 63
309 %c = lshr i64 %x, %and
314 define i64 @rotl_64_mask_and_127_and_63(i64 %x, i64 %y) nounwind {
315 ; LA32-LABEL: rotl_64_mask_and_127_and_63:
317 ; LA32-NEXT: sll.w $a3, $a1, $a2
318 ; LA32-NEXT: srli.w $a4, $a0, 1
319 ; LA32-NEXT: andi $a5, $a2, 127
320 ; LA32-NEXT: xori $a6, $a5, 31
321 ; LA32-NEXT: srl.w $a4, $a4, $a6
322 ; LA32-NEXT: or $a3, $a3, $a4
323 ; LA32-NEXT: addi.w $a4, $a5, -32
324 ; LA32-NEXT: slti $a5, $a4, 0
325 ; LA32-NEXT: maskeqz $a3, $a3, $a5
326 ; LA32-NEXT: sll.w $a6, $a0, $a4
327 ; LA32-NEXT: masknez $a5, $a6, $a5
328 ; LA32-NEXT: or $a3, $a3, $a5
329 ; LA32-NEXT: sll.w $a5, $a0, $a2
330 ; LA32-NEXT: srai.w $a4, $a4, 31
331 ; LA32-NEXT: and $a4, $a4, $a5
332 ; LA32-NEXT: sub.w $a2, $zero, $a2
333 ; LA32-NEXT: andi $a5, $a2, 63
334 ; LA32-NEXT: addi.w $a6, $a5, -32
335 ; LA32-NEXT: srl.w $a7, $a1, $a6
336 ; LA32-NEXT: slti $t0, $a6, 0
337 ; LA32-NEXT: masknez $a7, $a7, $t0
338 ; LA32-NEXT: srl.w $a0, $a0, $a2
339 ; LA32-NEXT: xori $a5, $a5, 31
340 ; LA32-NEXT: slli.w $t1, $a1, 1
341 ; LA32-NEXT: sll.w $a5, $t1, $a5
342 ; LA32-NEXT: or $a0, $a0, $a5
343 ; LA32-NEXT: maskeqz $a0, $a0, $t0
344 ; LA32-NEXT: or $a0, $a0, $a7
345 ; LA32-NEXT: srl.w $a1, $a1, $a2
346 ; LA32-NEXT: srai.w $a2, $a6, 31
347 ; LA32-NEXT: and $a1, $a2, $a1
348 ; LA32-NEXT: or $a1, $a3, $a1
349 ; LA32-NEXT: or $a0, $a4, $a0
352 ; LA64-LABEL: rotl_64_mask_and_127_and_63:
354 ; LA64-NEXT: sub.d $a1, $zero, $a1
355 ; LA64-NEXT: rotr.d $a0, $a0, $a1
366 define i64 @rotl_64_mask_or_128_or_64(i64 %x, i64 %y) nounwind {
367 ; LA32-LABEL: rotl_64_mask_or_128_or_64:
369 ; LA32-NEXT: move $a0, $zero
370 ; LA32-NEXT: move $a1, $zero
373 ; LA64-LABEL: rotl_64_mask_or_128_or_64:
375 ; LA64-NEXT: sub.d $a1, $zero, $a1
376 ; LA64-NEXT: rotr.d $a0, $a0, $a1
387 define i64 @rotr_64_mask(i64 %x, i64 %y) nounwind {
388 ; LA32-LABEL: rotr_64_mask:
390 ; LA32-NEXT: srl.w $a3, $a0, $a2
391 ; LA32-NEXT: xori $a4, $a2, 31
392 ; LA32-NEXT: slli.w $a5, $a1, 1
393 ; LA32-NEXT: sll.w $a4, $a5, $a4
394 ; LA32-NEXT: or $a3, $a3, $a4
395 ; LA32-NEXT: addi.w $a4, $a2, -32
396 ; LA32-NEXT: slti $a5, $a4, 0
397 ; LA32-NEXT: maskeqz $a3, $a3, $a5
398 ; LA32-NEXT: srl.w $a6, $a1, $a4
399 ; LA32-NEXT: masknez $a5, $a6, $a5
400 ; LA32-NEXT: or $a3, $a3, $a5
401 ; LA32-NEXT: srl.w $a5, $a1, $a2
402 ; LA32-NEXT: srai.w $a4, $a4, 31
403 ; LA32-NEXT: and $a4, $a4, $a5
404 ; LA32-NEXT: sub.w $a2, $zero, $a2
405 ; LA32-NEXT: andi $a5, $a2, 63
406 ; LA32-NEXT: addi.w $a6, $a5, -32
407 ; LA32-NEXT: sll.w $a7, $a0, $a6
408 ; LA32-NEXT: slti $t0, $a6, 0
409 ; LA32-NEXT: masknez $a7, $a7, $t0
410 ; LA32-NEXT: sll.w $a1, $a1, $a2
411 ; LA32-NEXT: xori $a5, $a5, 31
412 ; LA32-NEXT: srli.w $t1, $a0, 1
413 ; LA32-NEXT: srl.w $a5, $t1, $a5
414 ; LA32-NEXT: or $a1, $a1, $a5
415 ; LA32-NEXT: maskeqz $a1, $a1, $t0
416 ; LA32-NEXT: or $a1, $a1, $a7
417 ; LA32-NEXT: sll.w $a0, $a0, $a2
418 ; LA32-NEXT: srai.w $a2, $a6, 31
419 ; LA32-NEXT: and $a0, $a2, $a0
420 ; LA32-NEXT: or $a0, $a3, $a0
421 ; LA32-NEXT: or $a1, $a4, $a1
424 ; LA64-LABEL: rotr_64_mask:
426 ; LA64-NEXT: rotr.d $a0, $a0, $a1
429 %and = and i64 %z, 63
431 %c = shl i64 %x, %and
436 define i64 @rotr_64_mask_and_127_and_63(i64 %x, i64 %y) nounwind {
437 ; LA32-LABEL: rotr_64_mask_and_127_and_63:
439 ; LA32-NEXT: srl.w $a3, $a0, $a2
440 ; LA32-NEXT: slli.w $a4, $a1, 1
441 ; LA32-NEXT: andi $a5, $a2, 127
442 ; LA32-NEXT: xori $a6, $a5, 31
443 ; LA32-NEXT: sll.w $a4, $a4, $a6
444 ; LA32-NEXT: or $a3, $a3, $a4
445 ; LA32-NEXT: addi.w $a4, $a5, -32
446 ; LA32-NEXT: slti $a5, $a4, 0
447 ; LA32-NEXT: maskeqz $a3, $a3, $a5
448 ; LA32-NEXT: srl.w $a6, $a1, $a4
449 ; LA32-NEXT: masknez $a5, $a6, $a5
450 ; LA32-NEXT: or $a3, $a3, $a5
451 ; LA32-NEXT: srl.w $a5, $a1, $a2
452 ; LA32-NEXT: srai.w $a4, $a4, 31
453 ; LA32-NEXT: and $a4, $a4, $a5
454 ; LA32-NEXT: sub.w $a2, $zero, $a2
455 ; LA32-NEXT: andi $a5, $a2, 63
456 ; LA32-NEXT: addi.w $a6, $a5, -32
457 ; LA32-NEXT: sll.w $a7, $a0, $a6
458 ; LA32-NEXT: slti $t0, $a6, 0
459 ; LA32-NEXT: masknez $a7, $a7, $t0
460 ; LA32-NEXT: sll.w $a1, $a1, $a2
461 ; LA32-NEXT: xori $a5, $a5, 31
462 ; LA32-NEXT: srli.w $t1, $a0, 1
463 ; LA32-NEXT: srl.w $a5, $t1, $a5
464 ; LA32-NEXT: or $a1, $a1, $a5
465 ; LA32-NEXT: maskeqz $a1, $a1, $t0
466 ; LA32-NEXT: or $a1, $a1, $a7
467 ; LA32-NEXT: sll.w $a0, $a0, $a2
468 ; LA32-NEXT: srai.w $a2, $a6, 31
469 ; LA32-NEXT: and $a0, $a2, $a0
470 ; LA32-NEXT: or $a0, $a3, $a0
471 ; LA32-NEXT: or $a1, $a4, $a1
474 ; LA64-LABEL: rotr_64_mask_and_127_and_63:
476 ; LA64-NEXT: rotr.d $a0, $a0, $a1
487 define i64 @rotr_64_mask_or_128_or_64(i64 %x, i64 %y) nounwind {
488 ; LA32-LABEL: rotr_64_mask_or_128_or_64:
490 ; LA32-NEXT: move $a0, $zero
491 ; LA32-NEXT: move $a1, $zero
494 ; LA64-LABEL: rotr_64_mask_or_128_or_64:
496 ; LA64-NEXT: rotr.d $a0, $a0, $a1
507 define signext i32 @rotr_64_trunc_32(i64 %x, i64 %y) nounwind {
508 ; LA32-LABEL: rotr_64_trunc_32:
510 ; LA32-NEXT: srl.w $a3, $a0, $a2
511 ; LA32-NEXT: xori $a4, $a2, 31
512 ; LA32-NEXT: slli.w $a5, $a1, 1
513 ; LA32-NEXT: sll.w $a4, $a5, $a4
514 ; LA32-NEXT: or $a3, $a3, $a4
515 ; LA32-NEXT: addi.w $a4, $a2, -32
516 ; LA32-NEXT: slti $a5, $a4, 0
517 ; LA32-NEXT: maskeqz $a3, $a3, $a5
518 ; LA32-NEXT: srl.w $a1, $a1, $a4
519 ; LA32-NEXT: masknez $a1, $a1, $a5
520 ; LA32-NEXT: or $a1, $a3, $a1
521 ; LA32-NEXT: sub.w $a3, $zero, $a2
522 ; LA32-NEXT: sll.w $a0, $a0, $a3
523 ; LA32-NEXT: ori $a3, $zero, 32
524 ; LA32-NEXT: sub.w $a2, $a3, $a2
525 ; LA32-NEXT: srai.w $a2, $a2, 31
526 ; LA32-NEXT: and $a0, $a2, $a0
527 ; LA32-NEXT: or $a0, $a1, $a0
530 ; LA64-LABEL: rotr_64_trunc_32:
532 ; LA64-NEXT: rotr.d $a0, $a0, $a1
533 ; LA64-NEXT: addi.w $a0, $a0, 0
539 %e = trunc i64 %d to i32
543 define signext i32 @rotri_i32(i32 signext %a) nounwind {
544 ; LA32-LABEL: rotri_i32:
546 ; LA32-NEXT: rotri.w $a0, $a0, 16
549 ; LA64-LABEL: rotri_i32:
551 ; LA64-NEXT: rotri.w $a0, $a0, 16
553 %shl = shl i32 %a, 16
554 %shr = lshr i32 %a, 16
555 %or = or i32 %shl, %shr
559 define i64 @rotri_i64(i64 %a) nounwind {
560 ; LA32-LABEL: rotri_i64:
562 ; LA32-NEXT: move $a2, $a0
563 ; LA32-NEXT: move $a0, $a1
564 ; LA32-NEXT: move $a1, $a2
567 ; LA64-LABEL: rotri_i64:
569 ; LA64-NEXT: rotri.d $a0, $a0, 32
571 %shl = shl i64 %a, 32
572 %shr = lshr i64 %a, 32
573 %or = or i64 %shl, %shr
; Funnel-shift intrinsic declarations used by the *_fshl/*_fshr tests below.
; With both data operands equal (fshl(a, a, n) / fshr(a, a, n)) a funnel
; shift is a rotate, which is why the checks above expect rotri.w/rotri.d.
577 declare i32 @llvm.fshl.i32(i32, i32, i32)
578 declare i64 @llvm.fshl.i64(i64, i64, i64)
579 declare i32 @llvm.fshr.i32(i32, i32, i32)
580 declare i64 @llvm.fshr.i64(i64, i64, i64)
582 define signext i32 @rotl_i32_fshl(i32 signext %a) nounwind {
583 ; LA32-LABEL: rotl_i32_fshl:
585 ; LA32-NEXT: rotri.w $a0, $a0, 20
588 ; LA64-LABEL: rotl_i32_fshl:
590 ; LA64-NEXT: rotri.w $a0, $a0, 20
592 %or = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 12)
596 define i64 @rotl_i64_fshl(i64 %a) nounwind {
597 ; LA32-LABEL: rotl_i64_fshl:
599 ; LA32-NEXT: srli.w $a2, $a1, 20
600 ; LA32-NEXT: slli.w $a3, $a0, 12
601 ; LA32-NEXT: or $a2, $a3, $a2
602 ; LA32-NEXT: srli.w $a0, $a0, 20
603 ; LA32-NEXT: slli.w $a1, $a1, 12
604 ; LA32-NEXT: or $a1, $a1, $a0
605 ; LA32-NEXT: move $a0, $a2
608 ; LA64-LABEL: rotl_i64_fshl:
610 ; LA64-NEXT: rotri.d $a0, $a0, 52
612 %or = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 12)
616 define signext i32 @rotr_i32_fshr(i32 signext %a) nounwind {
617 ; LA32-LABEL: rotr_i32_fshr:
619 ; LA32-NEXT: rotri.w $a0, $a0, 12
622 ; LA64-LABEL: rotr_i32_fshr:
624 ; LA64-NEXT: rotri.w $a0, $a0, 12
626 %or = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 12)
630 define i64 @rotr_i64_fshr(i64 %a) nounwind {
631 ; LA32-LABEL: rotr_i64_fshr:
633 ; LA32-NEXT: srli.w $a2, $a0, 12
634 ; LA32-NEXT: slli.w $a3, $a1, 20
635 ; LA32-NEXT: or $a2, $a3, $a2
636 ; LA32-NEXT: srli.w $a1, $a1, 12
637 ; LA32-NEXT: slli.w $a0, $a0, 20
638 ; LA32-NEXT: or $a1, $a0, $a1
639 ; LA32-NEXT: move $a0, $a2
642 ; LA64-LABEL: rotr_i64_fshr:
644 ; LA64-NEXT: rotri.d $a0, $a0, 12
646 %or = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 12)