// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
// RUN:   -target-feature +experimental-zvbb \
// RUN:   -target-feature +experimental-zvbc \
// RUN:   -target-feature +experimental-zvkg \
// RUN:   -target-feature +experimental-zvkned \
// RUN:   -target-feature +experimental-zvknhb \
// RUN:   -target-feature +experimental-zvksed \
// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
16 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vwsll_vv_u16mf4
17 // CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
18 // CHECK-RV64-NEXT: entry:
19 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vwsll.nxv1i16.nxv1i8.nxv1i8.i64(<vscale x 1 x i16> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
20 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
22 vuint16mf4_t
test_vwsll_vv_u16mf4(vuint8mf8_t vs2
, vuint8mf8_t vs1
, size_t vl
) {
23 return __riscv_vwsll_vv_u16mf4(vs2
, vs1
, vl
);
26 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vwsll_vx_u16mf4
27 // CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
28 // CHECK-RV64-NEXT: entry:
29 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vwsll.nxv1i16.nxv1i8.i64.i64(<vscale x 1 x i16> poison, <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
30 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
32 vuint16mf4_t
test_vwsll_vx_u16mf4(vuint8mf8_t vs2
, size_t rs1
, size_t vl
) {
33 return __riscv_vwsll_vx_u16mf4(vs2
, rs1
, vl
);
36 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vwsll_vv_u16mf2
37 // CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
38 // CHECK-RV64-NEXT: entry:
39 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vwsll.nxv2i16.nxv2i8.nxv2i8.i64(<vscale x 2 x i16> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], i64 [[VL]])
40 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
42 vuint16mf2_t
test_vwsll_vv_u16mf2(vuint8mf4_t vs2
, vuint8mf4_t vs1
, size_t vl
) {
43 return __riscv_vwsll_vv_u16mf2(vs2
, vs1
, vl
);
46 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vwsll_vx_u16mf2
47 // CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
48 // CHECK-RV64-NEXT: entry:
49 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vwsll.nxv2i16.nxv2i8.i64.i64(<vscale x 2 x i16> poison, <vscale x 2 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
50 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
52 vuint16mf2_t
test_vwsll_vx_u16mf2(vuint8mf4_t vs2
, size_t rs1
, size_t vl
) {
53 return __riscv_vwsll_vx_u16mf2(vs2
, rs1
, vl
);
56 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vwsll_vv_u16m1
57 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
58 // CHECK-RV64-NEXT: entry:
59 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vwsll.nxv4i16.nxv4i8.nxv4i8.i64(<vscale x 4 x i16> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], i64 [[VL]])
60 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
62 vuint16m1_t
test_vwsll_vv_u16m1(vuint8mf2_t vs2
, vuint8mf2_t vs1
, size_t vl
) {
63 return __riscv_vwsll_vv_u16m1(vs2
, vs1
, vl
);
66 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vwsll_vx_u16m1
67 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
68 // CHECK-RV64-NEXT: entry:
69 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vwsll.nxv4i16.nxv4i8.i64.i64(<vscale x 4 x i16> poison, <vscale x 4 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
70 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
72 vuint16m1_t
test_vwsll_vx_u16m1(vuint8mf2_t vs2
, size_t rs1
, size_t vl
) {
73 return __riscv_vwsll_vx_u16m1(vs2
, rs1
, vl
);
76 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vwsll_vv_u16m2
77 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
78 // CHECK-RV64-NEXT: entry:
79 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vwsll.nxv8i16.nxv8i8.nxv8i8.i64(<vscale x 8 x i16> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], i64 [[VL]])
80 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
82 vuint16m2_t
test_vwsll_vv_u16m2(vuint8m1_t vs2
, vuint8m1_t vs1
, size_t vl
) {
83 return __riscv_vwsll_vv_u16m2(vs2
, vs1
, vl
);
86 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vwsll_vx_u16m2
87 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
88 // CHECK-RV64-NEXT: entry:
89 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vwsll.nxv8i16.nxv8i8.i64.i64(<vscale x 8 x i16> poison, <vscale x 8 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
90 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
92 vuint16m2_t
test_vwsll_vx_u16m2(vuint8m1_t vs2
, size_t rs1
, size_t vl
) {
93 return __riscv_vwsll_vx_u16m2(vs2
, rs1
, vl
);
96 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vwsll_vv_u16m4
97 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
98 // CHECK-RV64-NEXT: entry:
99 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vwsll.nxv16i16.nxv16i8.nxv16i8.i64(<vscale x 16 x i16> poison, <vscale x 16 x i8> [[VS2]], <vscale x 16 x i8> [[VS1]], i64 [[VL]])
100 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
102 vuint16m4_t
test_vwsll_vv_u16m4(vuint8m2_t vs2
, vuint8m2_t vs1
, size_t vl
) {
103 return __riscv_vwsll_vv_u16m4(vs2
, vs1
, vl
);
106 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vwsll_vx_u16m4
107 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
108 // CHECK-RV64-NEXT: entry:
109 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vwsll.nxv16i16.nxv16i8.i64.i64(<vscale x 16 x i16> poison, <vscale x 16 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
110 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
112 vuint16m4_t
test_vwsll_vx_u16m4(vuint8m2_t vs2
, size_t rs1
, size_t vl
) {
113 return __riscv_vwsll_vx_u16m4(vs2
, rs1
, vl
);
116 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vwsll_vv_u16m8
117 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
118 // CHECK-RV64-NEXT: entry:
119 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vwsll.nxv32i16.nxv32i8.nxv32i8.i64(<vscale x 32 x i16> poison, <vscale x 32 x i8> [[VS2]], <vscale x 32 x i8> [[VS1]], i64 [[VL]])
120 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
122 vuint16m8_t
test_vwsll_vv_u16m8(vuint8m4_t vs2
, vuint8m4_t vs1
, size_t vl
) {
123 return __riscv_vwsll_vv_u16m8(vs2
, vs1
, vl
);
126 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vwsll_vx_u16m8
127 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
128 // CHECK-RV64-NEXT: entry:
129 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vwsll.nxv32i16.nxv32i8.i64.i64(<vscale x 32 x i16> poison, <vscale x 32 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
130 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
132 vuint16m8_t
test_vwsll_vx_u16m8(vuint8m4_t vs2
, size_t rs1
, size_t vl
) {
133 return __riscv_vwsll_vx_u16m8(vs2
, rs1
, vl
);
136 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vwsll_vv_u32mf2
137 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
138 // CHECK-RV64-NEXT: entry:
139 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vwsll.nxv1i32.nxv1i16.nxv1i16.i64(<vscale x 1 x i32> poison, <vscale x 1 x i16> [[VS2]], <vscale x 1 x i16> [[VS1]], i64 [[VL]])
140 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
142 vuint32mf2_t
test_vwsll_vv_u32mf2(vuint16mf4_t vs2
, vuint16mf4_t vs1
, size_t vl
) {
143 return __riscv_vwsll_vv_u32mf2(vs2
, vs1
, vl
);
146 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vwsll_vx_u32mf2
147 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
148 // CHECK-RV64-NEXT: entry:
149 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vwsll.nxv1i32.nxv1i16.i64.i64(<vscale x 1 x i32> poison, <vscale x 1 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
150 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
152 vuint32mf2_t
test_vwsll_vx_u32mf2(vuint16mf4_t vs2
, size_t rs1
, size_t vl
) {
153 return __riscv_vwsll_vx_u32mf2(vs2
, rs1
, vl
);
156 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vwsll_vv_u32m1
157 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[VS2:%.*]], <vscale x 2 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
158 // CHECK-RV64-NEXT: entry:
159 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vwsll.nxv2i32.nxv2i16.nxv2i16.i64(<vscale x 2 x i32> poison, <vscale x 2 x i16> [[VS2]], <vscale x 2 x i16> [[VS1]], i64 [[VL]])
160 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
162 vuint32m1_t
test_vwsll_vv_u32m1(vuint16mf2_t vs2
, vuint16mf2_t vs1
, size_t vl
) {
163 return __riscv_vwsll_vv_u32m1(vs2
, vs1
, vl
);
166 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vwsll_vx_u32m1
167 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
168 // CHECK-RV64-NEXT: entry:
169 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vwsll.nxv2i32.nxv2i16.i64.i64(<vscale x 2 x i32> poison, <vscale x 2 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
170 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
172 vuint32m1_t
test_vwsll_vx_u32m1(vuint16mf2_t vs2
, size_t rs1
, size_t vl
) {
173 return __riscv_vwsll_vx_u32m1(vs2
, rs1
, vl
);
176 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vwsll_vv_u32m2
177 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[VS2:%.*]], <vscale x 4 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
178 // CHECK-RV64-NEXT: entry:
179 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vwsll.nxv4i32.nxv4i16.nxv4i16.i64(<vscale x 4 x i32> poison, <vscale x 4 x i16> [[VS2]], <vscale x 4 x i16> [[VS1]], i64 [[VL]])
180 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
182 vuint32m2_t
test_vwsll_vv_u32m2(vuint16m1_t vs2
, vuint16m1_t vs1
, size_t vl
) {
183 return __riscv_vwsll_vv_u32m2(vs2
, vs1
, vl
);
186 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vwsll_vx_u32m2
187 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
188 // CHECK-RV64-NEXT: entry:
189 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vwsll.nxv4i32.nxv4i16.i64.i64(<vscale x 4 x i32> poison, <vscale x 4 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
190 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
192 vuint32m2_t
test_vwsll_vx_u32m2(vuint16m1_t vs2
, size_t rs1
, size_t vl
) {
193 return __riscv_vwsll_vx_u32m2(vs2
, rs1
, vl
);
196 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vwsll_vv_u32m4
197 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[VS2:%.*]], <vscale x 8 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
198 // CHECK-RV64-NEXT: entry:
199 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vwsll.nxv8i32.nxv8i16.nxv8i16.i64(<vscale x 8 x i32> poison, <vscale x 8 x i16> [[VS2]], <vscale x 8 x i16> [[VS1]], i64 [[VL]])
200 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
202 vuint32m4_t
test_vwsll_vv_u32m4(vuint16m2_t vs2
, vuint16m2_t vs1
, size_t vl
) {
203 return __riscv_vwsll_vv_u32m4(vs2
, vs1
, vl
);
206 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vwsll_vx_u32m4
207 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
208 // CHECK-RV64-NEXT: entry:
209 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vwsll.nxv8i32.nxv8i16.i64.i64(<vscale x 8 x i32> poison, <vscale x 8 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
210 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
212 vuint32m4_t
test_vwsll_vx_u32m4(vuint16m2_t vs2
, size_t rs1
, size_t vl
) {
213 return __riscv_vwsll_vx_u32m4(vs2
, rs1
, vl
);
216 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vwsll_vv_u32m8
217 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[VS2:%.*]], <vscale x 16 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
218 // CHECK-RV64-NEXT: entry:
219 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vwsll.nxv16i32.nxv16i16.nxv16i16.i64(<vscale x 16 x i32> poison, <vscale x 16 x i16> [[VS2]], <vscale x 16 x i16> [[VS1]], i64 [[VL]])
220 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
222 vuint32m8_t
test_vwsll_vv_u32m8(vuint16m4_t vs2
, vuint16m4_t vs1
, size_t vl
) {
223 return __riscv_vwsll_vv_u32m8(vs2
, vs1
, vl
);
226 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vwsll_vx_u32m8
227 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
228 // CHECK-RV64-NEXT: entry:
229 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vwsll.nxv16i32.nxv16i16.i64.i64(<vscale x 16 x i32> poison, <vscale x 16 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
230 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
232 vuint32m8_t
test_vwsll_vx_u32m8(vuint16m4_t vs2
, size_t rs1
, size_t vl
) {
233 return __riscv_vwsll_vx_u32m8(vs2
, rs1
, vl
);
236 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vwsll_vv_u64m1
237 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
238 // CHECK-RV64-NEXT: entry:
239 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vwsll.nxv1i64.nxv1i32.nxv1i32.i64(<vscale x 1 x i64> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]])
240 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
242 vuint64m1_t
test_vwsll_vv_u64m1(vuint32mf2_t vs2
, vuint32mf2_t vs1
, size_t vl
) {
243 return __riscv_vwsll_vv_u64m1(vs2
, vs1
, vl
);
246 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vwsll_vx_u64m1
247 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
248 // CHECK-RV64-NEXT: entry:
249 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vwsll.nxv1i64.nxv1i32.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
250 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
252 vuint64m1_t
test_vwsll_vx_u64m1(vuint32mf2_t vs2
, size_t rs1
, size_t vl
) {
253 return __riscv_vwsll_vx_u64m1(vs2
, rs1
, vl
);
256 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vwsll_vv_u64m2
257 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
258 // CHECK-RV64-NEXT: entry:
259 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vwsll.nxv2i64.nxv2i32.nxv2i32.i64(<vscale x 2 x i64> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]])
260 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
262 vuint64m2_t
test_vwsll_vv_u64m2(vuint32m1_t vs2
, vuint32m1_t vs1
, size_t vl
) {
263 return __riscv_vwsll_vv_u64m2(vs2
, vs1
, vl
);
266 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vwsll_vx_u64m2
267 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
268 // CHECK-RV64-NEXT: entry:
269 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vwsll.nxv2i64.nxv2i32.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
270 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
272 vuint64m2_t
test_vwsll_vx_u64m2(vuint32m1_t vs2
, size_t rs1
, size_t vl
) {
273 return __riscv_vwsll_vx_u64m2(vs2
, rs1
, vl
);
276 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vwsll_vv_u64m4
277 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
278 // CHECK-RV64-NEXT: entry:
279 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vwsll.nxv4i64.nxv4i32.nxv4i32.i64(<vscale x 4 x i64> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]])
280 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
282 vuint64m4_t
test_vwsll_vv_u64m4(vuint32m2_t vs2
, vuint32m2_t vs1
, size_t vl
) {
283 return __riscv_vwsll_vv_u64m4(vs2
, vs1
, vl
);
286 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vwsll_vx_u64m4
287 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
288 // CHECK-RV64-NEXT: entry:
289 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vwsll.nxv4i64.nxv4i32.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
290 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
292 vuint64m4_t
test_vwsll_vx_u64m4(vuint32m2_t vs2
, size_t rs1
, size_t vl
) {
293 return __riscv_vwsll_vx_u64m4(vs2
, rs1
, vl
);
296 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vwsll_vv_u64m8
297 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
298 // CHECK-RV64-NEXT: entry:
299 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vwsll.nxv8i64.nxv8i32.nxv8i32.i64(<vscale x 8 x i64> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]])
300 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
302 vuint64m8_t
test_vwsll_vv_u64m8(vuint32m4_t vs2
, vuint32m4_t vs1
, size_t vl
) {
303 return __riscv_vwsll_vv_u64m8(vs2
, vs1
, vl
);
306 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vwsll_vx_u64m8
307 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
308 // CHECK-RV64-NEXT: entry:
309 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vwsll.nxv8i64.nxv8i32.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
310 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
312 vuint64m8_t
test_vwsll_vx_u64m8(vuint32m4_t vs2
, size_t rs1
, size_t vl
) {
313 return __riscv_vwsll_vx_u64m8(vs2
, rs1
, vl
);
316 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vwsll_vv_u16mf4_m
317 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
318 // CHECK-RV64-NEXT: entry:
319 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64(<vscale x 1 x i16> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
320 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
322 vuint16mf4_t
test_vwsll_vv_u16mf4_m(vbool64_t mask
, vuint8mf8_t vs2
, vuint8mf8_t vs1
, size_t vl
) {
323 return __riscv_vwsll_vv_u16mf4_m(mask
, vs2
, vs1
, vl
);
326 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vwsll_vx_u16mf4_m
327 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
328 // CHECK-RV64-NEXT: entry:
329 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64(<vscale x 1 x i16> poison, <vscale x 1 x i8> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
330 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
332 vuint16mf4_t
test_vwsll_vx_u16mf4_m(vbool64_t mask
, vuint8mf8_t vs2
, size_t rs1
, size_t vl
) {
333 return __riscv_vwsll_vx_u16mf4_m(mask
, vs2
, rs1
, vl
);
336 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vwsll_vv_u16mf2_m
337 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
338 // CHECK-RV64-NEXT: entry:
339 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64(<vscale x 2 x i16> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
340 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
342 vuint16mf2_t
test_vwsll_vv_u16mf2_m(vbool32_t mask
, vuint8mf4_t vs2
, vuint8mf4_t vs1
, size_t vl
) {
343 return __riscv_vwsll_vv_u16mf2_m(mask
, vs2
, vs1
, vl
);
346 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vwsll_vx_u16mf2_m
347 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
348 // CHECK-RV64-NEXT: entry:
349 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64(<vscale x 2 x i16> poison, <vscale x 2 x i8> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
350 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
352 vuint16mf2_t
test_vwsll_vx_u16mf2_m(vbool32_t mask
, vuint8mf4_t vs2
, size_t rs1
, size_t vl
) {
353 return __riscv_vwsll_vx_u16mf2_m(mask
, vs2
, rs1
, vl
);
356 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vwsll_vv_u16m1_m
357 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
358 // CHECK-RV64-NEXT: entry:
359 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64(<vscale x 4 x i16> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
360 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
362 vuint16m1_t
test_vwsll_vv_u16m1_m(vbool16_t mask
, vuint8mf2_t vs2
, vuint8mf2_t vs1
, size_t vl
) {
363 return __riscv_vwsll_vv_u16m1_m(mask
, vs2
, vs1
, vl
);
366 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vwsll_vx_u16m1_m
367 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
368 // CHECK-RV64-NEXT: entry:
369 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64(<vscale x 4 x i16> poison, <vscale x 4 x i8> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
370 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
372 vuint16m1_t
test_vwsll_vx_u16m1_m(vbool16_t mask
, vuint8mf2_t vs2
, size_t rs1
, size_t vl
) {
373 return __riscv_vwsll_vx_u16m1_m(mask
, vs2
, rs1
, vl
);
376 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vwsll_vv_u16m2_m
377 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
378 // CHECK-RV64-NEXT: entry:
379 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64(<vscale x 8 x i16> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
380 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
382 vuint16m2_t
test_vwsll_vv_u16m2_m(vbool8_t mask
, vuint8m1_t vs2
, vuint8m1_t vs1
, size_t vl
) {
383 return __riscv_vwsll_vv_u16m2_m(mask
, vs2
, vs1
, vl
);
386 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vwsll_vx_u16m2_m
387 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
388 // CHECK-RV64-NEXT: entry:
389 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64(<vscale x 8 x i16> poison, <vscale x 8 x i8> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
390 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
392 vuint16m2_t
test_vwsll_vx_u16m2_m(vbool8_t mask
, vuint8m1_t vs2
, size_t rs1
, size_t vl
) {
393 return __riscv_vwsll_vx_u16m2_m(mask
, vs2
, rs1
, vl
);
396 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vwsll_vv_u16m4_m
397 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
398 // CHECK-RV64-NEXT: entry:
399 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64(<vscale x 16 x i16> poison, <vscale x 16 x i8> [[VS2]], <vscale x 16 x i8> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
400 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
402 vuint16m4_t
test_vwsll_vv_u16m4_m(vbool4_t mask
, vuint8m2_t vs2
, vuint8m2_t vs1
, size_t vl
) {
403 return __riscv_vwsll_vv_u16m4_m(mask
, vs2
, vs1
, vl
);
406 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vwsll_vx_u16m4_m
407 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
408 // CHECK-RV64-NEXT: entry:
409 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64(<vscale x 16 x i16> poison, <vscale x 16 x i8> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
410 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
412 vuint16m4_t
test_vwsll_vx_u16m4_m(vbool4_t mask
, vuint8m2_t vs2
, size_t rs1
, size_t vl
) {
413 return __riscv_vwsll_vx_u16m4_m(mask
, vs2
, rs1
, vl
);
416 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vwsll_vv_u16m8_m
417 // CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
418 // CHECK-RV64-NEXT: entry:
419 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64(<vscale x 32 x i16> poison, <vscale x 32 x i8> [[VS2]], <vscale x 32 x i8> [[VS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
420 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
422 vuint16m8_t
test_vwsll_vv_u16m8_m(vbool2_t mask
, vuint8m4_t vs2
, vuint8m4_t vs1
, size_t vl
) {
423 return __riscv_vwsll_vv_u16m8_m(mask
, vs2
, vs1
, vl
);
426 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vwsll_vx_u16m8_m
427 // CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
428 // CHECK-RV64-NEXT: entry:
429 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64(<vscale x 32 x i16> poison, <vscale x 32 x i8> [[VS2]], i64 [[RS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
430 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
432 vuint16m8_t
test_vwsll_vx_u16m8_m(vbool2_t mask
, vuint8m4_t vs2
, size_t rs1
, size_t vl
) {
433 return __riscv_vwsll_vx_u16m8_m(mask
, vs2
, rs1
, vl
);
436 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vwsll_vv_u32mf2_m
437 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
438 // CHECK-RV64-NEXT: entry:
439 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64(<vscale x 1 x i32> poison, <vscale x 1 x i16> [[VS2]], <vscale x 1 x i16> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
440 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
442 vuint32mf2_t
test_vwsll_vv_u32mf2_m(vbool64_t mask
, vuint16mf4_t vs2
, vuint16mf4_t vs1
, size_t vl
) {
443 return __riscv_vwsll_vv_u32mf2_m(mask
, vs2
, vs1
, vl
);
446 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vwsll_vx_u32mf2_m
447 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
448 // CHECK-RV64-NEXT: entry:
449 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64(<vscale x 1 x i32> poison, <vscale x 1 x i16> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
450 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
452 vuint32mf2_t
test_vwsll_vx_u32mf2_m(vbool64_t mask
, vuint16mf4_t vs2
, size_t rs1
, size_t vl
) {
453 return __riscv_vwsll_vx_u32mf2_m(mask
, vs2
, rs1
, vl
);
456 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vwsll_vv_u32m1_m
457 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[VS2:%.*]], <vscale x 2 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
458 // CHECK-RV64-NEXT: entry:
459 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64(<vscale x 2 x i32> poison, <vscale x 2 x i16> [[VS2]], <vscale x 2 x i16> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
460 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
462 vuint32m1_t
test_vwsll_vv_u32m1_m(vbool32_t mask
, vuint16mf2_t vs2
, vuint16mf2_t vs1
, size_t vl
) {
463 return __riscv_vwsll_vv_u32m1_m(mask
, vs2
, vs1
, vl
);
466 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vwsll_vx_u32m1_m
467 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
468 // CHECK-RV64-NEXT: entry:
469 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64(<vscale x 2 x i32> poison, <vscale x 2 x i16> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
470 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
472 vuint32m1_t
test_vwsll_vx_u32m1_m(vbool32_t mask
, vuint16mf2_t vs2
, size_t rs1
, size_t vl
) {
473 return __riscv_vwsll_vx_u32m1_m(mask
, vs2
, rs1
, vl
);
476 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vwsll_vv_u32m2_m
477 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[VS2:%.*]], <vscale x 4 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
478 // CHECK-RV64-NEXT: entry:
479 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64(<vscale x 4 x i32> poison, <vscale x 4 x i16> [[VS2]], <vscale x 4 x i16> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
480 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
482 vuint32m2_t
test_vwsll_vv_u32m2_m(vbool16_t mask
, vuint16m1_t vs2
, vuint16m1_t vs1
, size_t vl
) {
483 return __riscv_vwsll_vv_u32m2_m(mask
, vs2
, vs1
, vl
);
486 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vwsll_vx_u32m2_m
487 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
488 // CHECK-RV64-NEXT: entry:
489 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64(<vscale x 4 x i32> poison, <vscale x 4 x i16> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
490 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
492 vuint32m2_t
test_vwsll_vx_u32m2_m(vbool16_t mask
, vuint16m1_t vs2
, size_t rs1
, size_t vl
) {
493 return __riscv_vwsll_vx_u32m2_m(mask
, vs2
, rs1
, vl
);
496 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vwsll_vv_u32m4_m
497 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[VS2:%.*]], <vscale x 8 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
498 // CHECK-RV64-NEXT: entry:
499 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64(<vscale x 8 x i32> poison, <vscale x 8 x i16> [[VS2]], <vscale x 8 x i16> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
500 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
502 vuint32m4_t
test_vwsll_vv_u32m4_m(vbool8_t mask
, vuint16m2_t vs2
, vuint16m2_t vs1
, size_t vl
) {
503 return __riscv_vwsll_vv_u32m4_m(mask
, vs2
, vs1
, vl
);
506 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vwsll_vx_u32m4_m
507 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
508 // CHECK-RV64-NEXT: entry:
509 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64(<vscale x 8 x i32> poison, <vscale x 8 x i16> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
510 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
512 vuint32m4_t
test_vwsll_vx_u32m4_m(vbool8_t mask
, vuint16m2_t vs2
, size_t rs1
, size_t vl
) {
513 return __riscv_vwsll_vx_u32m4_m(mask
, vs2
, rs1
, vl
);
516 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vwsll_vv_u32m8_m
517 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[VS2:%.*]], <vscale x 16 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
518 // CHECK-RV64-NEXT: entry:
519 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64(<vscale x 16 x i32> poison, <vscale x 16 x i16> [[VS2]], <vscale x 16 x i16> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
520 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
522 vuint32m8_t
test_vwsll_vv_u32m8_m(vbool4_t mask
, vuint16m4_t vs2
, vuint16m4_t vs1
, size_t vl
) {
523 return __riscv_vwsll_vv_u32m8_m(mask
, vs2
, vs1
, vl
);
526 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vwsll_vx_u32m8_m
527 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
528 // CHECK-RV64-NEXT: entry:
529 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64(<vscale x 16 x i32> poison, <vscale x 16 x i16> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
530 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
532 vuint32m8_t
test_vwsll_vx_u32m8_m(vbool4_t mask
, vuint16m4_t vs2
, size_t rs1
, size_t vl
) {
533 return __riscv_vwsll_vx_u32m8_m(mask
, vs2
, rs1
, vl
);
536 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vwsll_vv_u64m1_m
537 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
538 // CHECK-RV64-NEXT: entry:
539 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64(<vscale x 1 x i64> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
540 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
542 vuint64m1_t
test_vwsll_vv_u64m1_m(vbool64_t mask
, vuint32mf2_t vs2
, vuint32mf2_t vs1
, size_t vl
) {
543 return __riscv_vwsll_vv_u64m1_m(mask
, vs2
, vs1
, vl
);
546 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vwsll_vx_u64m1_m
547 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
548 // CHECK-RV64-NEXT: entry:
549 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i32> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
550 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
552 vuint64m1_t
test_vwsll_vx_u64m1_m(vbool64_t mask
, vuint32mf2_t vs2
, size_t rs1
, size_t vl
) {
553 return __riscv_vwsll_vx_u64m1_m(mask
, vs2
, rs1
, vl
);
556 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vwsll_vv_u64m2_m
557 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
558 // CHECK-RV64-NEXT: entry:
559 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64(<vscale x 2 x i64> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
560 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
562 vuint64m2_t
test_vwsll_vv_u64m2_m(vbool32_t mask
, vuint32m1_t vs2
, vuint32m1_t vs1
, size_t vl
) {
563 return __riscv_vwsll_vv_u64m2_m(mask
, vs2
, vs1
, vl
);
566 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vwsll_vx_u64m2_m
567 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
568 // CHECK-RV64-NEXT: entry:
569 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i32> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
570 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
572 vuint64m2_t
test_vwsll_vx_u64m2_m(vbool32_t mask
, vuint32m1_t vs2
, size_t rs1
, size_t vl
) {
573 return __riscv_vwsll_vx_u64m2_m(mask
, vs2
, rs1
, vl
);
576 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vwsll_vv_u64m4_m
577 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
578 // CHECK-RV64-NEXT: entry:
579 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64(<vscale x 4 x i64> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
580 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
582 vuint64m4_t
test_vwsll_vv_u64m4_m(vbool16_t mask
, vuint32m2_t vs2
, vuint32m2_t vs1
, size_t vl
) {
583 return __riscv_vwsll_vv_u64m4_m(mask
, vs2
, vs1
, vl
);
586 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vwsll_vx_u64m4_m
587 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
588 // CHECK-RV64-NEXT: entry:
589 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i32> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
590 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
592 vuint64m4_t
test_vwsll_vx_u64m4_m(vbool16_t mask
, vuint32m2_t vs2
, size_t rs1
, size_t vl
) {
593 return __riscv_vwsll_vx_u64m4_m(mask
, vs2
, rs1
, vl
);
596 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vwsll_vv_u64m8_m
597 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
598 // CHECK-RV64-NEXT: entry:
599 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64(<vscale x 8 x i64> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
600 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
602 vuint64m8_t
test_vwsll_vv_u64m8_m(vbool8_t mask
, vuint32m4_t vs2
, vuint32m4_t vs1
, size_t vl
) {
603 return __riscv_vwsll_vv_u64m8_m(mask
, vs2
, vs1
, vl
);
606 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vwsll_vx_u64m8_m
607 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
608 // CHECK-RV64-NEXT: entry:
609 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i32> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
610 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
612 vuint64m8_t
test_vwsll_vx_u64m8_m(vbool8_t mask
, vuint32m4_t vs2
, size_t rs1
, size_t vl
) {
613 return __riscv_vwsll_vx_u64m8_m(mask
, vs2
, rs1
, vl
);