; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK-RV32
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK-RV64

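; Each i16 lane computes ((x >> 7) & 0x0101) * 0xff, which broadcasts the sign
; bit of every byte across that byte. The checks below expect this to lower to
; a single e8 vsra.vi by 7 over the same data instead of the shift/and/mul
; sequence.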
define <2 x i16> @test_v2i16(<2 x i16> %x) {
; CHECK-RV32-LABEL: test_v2i16:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 7
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_v2i16:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 7
; CHECK-RV64-NEXT:    ret
  %1 = lshr <2 x i16> %x, <i16 7, i16 7>
  %2 = and <2 x i16> %1, <i16 257, i16 257>
  %3 = mul <2 x i16> %2, <i16 255, i16 255>
  ret <2 x i16> %3
}

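; Same shift/and pattern on a scalable type, but the multiplier is 256 rather
; than 255, so presumably the byte-wise arithmetic-shift rewrite does not apply
; here: the checks keep the plain vsrl/vand and turn the mul by 256 into a
; left shift by 8.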
define <vscale x 2 x i16> @test_nxv2i16(<vscale x 2 x i16> %x) {
; CHECK-RV32-LABEL: test_nxv2i16:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetvli a0, zero, e16, mf2, ta, ma
; CHECK-RV32-NEXT:    vsrl.vi v8, v8, 7
; CHECK-RV32-NEXT:    li a0, 257
; CHECK-RV32-NEXT:    vand.vx v8, v8, a0
; CHECK-RV32-NEXT:    vsll.vi v8, v8, 8
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_nxv2i16:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetvli a0, zero, e16, mf2, ta, ma
; CHECK-RV64-NEXT:    vsrl.vi v8, v8, 7
; CHECK-RV64-NEXT:    li a0, 257
; CHECK-RV64-NEXT:    vand.vx v8, v8, a0
; CHECK-RV64-NEXT:    vsll.vi v8, v8, 8
; CHECK-RV64-NEXT:    ret
  %1 = lshr <vscale x 2 x i16> %x, splat (i16 7)
  %2 = and <vscale x 2 x i16> %1, splat (i16 257)
  %3 = mul <vscale x 2 x i16> %2, splat (i16 256)
  ret <vscale x 2 x i16> %3
}

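; i32 variant: ((x >> 15) & 0x00010001) * 0xffff broadcasts the sign bit of
; every halfword, so the checks expect a single e16 vsra.vi by 15.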
define <2 x i32> @test_v2i32(<2 x i32> %x) {
; CHECK-RV32-LABEL: test_v2i32:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_v2i32:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV64-NEXT:    ret
  %1 = lshr <2 x i32> %x, <i32 15, i32 15>
  %2 = and <2 x i32> %1, <i32 65537, i32 65537>
  %3 = mul <2 x i32> %2, <i32 65535, i32 65535>
  ret <2 x i32> %3
}

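; Scalable version of the i32 pattern above; the checks expect the same
; single e16 vsra.vi by 15.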
define <vscale x 2 x i32> @test_nxv2i32(<vscale x 2 x i32> %x) {
; CHECK-RV32-LABEL: test_nxv2i32:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetvli a0, zero, e16, m1, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_nxv2i32:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetvli a0, zero, e16, m1, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV64-NEXT:    ret
  %1 = lshr <vscale x 2 x i32> %x, splat (i32 15)
  %2 = and <vscale x 2 x i32> %1, splat (i32 65537)
  %3 = mul <vscale x 2 x i32> %2, splat (i32 65535)
  ret <vscale x 2 x i32> %3
}

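; i64 variant: ((x >> 31) & 0x0000000100000001) * 0xffffffff broadcasts the
; sign bit of every 32-bit word, so the checks expect a single e32 vsra.vi by 31.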
define <2 x i64> @test_v2i64(<2 x i64> %x) {
; CHECK-RV32-LABEL: test_v2i64:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_v2i64:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV64-NEXT:    ret
  %1 = lshr <2 x i64> %x, <i64 31, i64 31>
  %2 = and <2 x i64> %1, <i64 4294967297, i64 4294967297>
  %3 = mul <2 x i64> %2, <i64 4294967295, i64 4294967295>
  ret <2 x i64> %3
}

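; Scalable version of the i64 pattern above; the checks expect the same
; single e32 vsra.vi by 31.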
define <vscale x 2 x i64> @test_nxv2i64(<vscale x 2 x i64> %x) {
; CHECK-RV32-LABEL: test_nxv2i64:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_nxv2i64:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV64-NEXT:    ret
  %1 = lshr <vscale x 2 x i64> %x, splat (i64 31)
  %2 = and <vscale x 2 x i64> %1, splat (i64 4294967297)
  %3 = mul <vscale x 2 x i64> %2, splat (i64 4294967295)
  ret <vscale x 2 x i64> %3
}