// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
// RUN:   -target-feature +experimental-zvbb \
// RUN:   -target-feature +experimental-zvbc \
// RUN:   -target-feature +experimental-zvkg \
// RUN:   -target-feature +experimental-zvkned \
// RUN:   -target-feature +experimental-zvknhb \
// RUN:   -target-feature +experimental-zvksed \
// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
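
// The Zvbb vclz instruction counts leading zero bits in each active element;
// an element equal to 0 yields SEW (e.g., for SEW=8, vclz(0x10) = 3 and
// vclz(0) = 8). The unmasked intrinsics below lower to the llvm.riscv.vclz
// intrinsic with a poison passthru operand, since these non-policy forms
// carry no tail data.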

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vclz_v_u8mf8
// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vclz.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
//
vuint8mf8_t test_vclz_v_u8mf8(vuint8mf8_t vs2, size_t vl) {
  return __riscv_vclz_v_u8mf8(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vclz_v_u8mf4
// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vclz.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vclz_v_u8mf4(vuint8mf4_t vs2, size_t vl) {
  return __riscv_vclz_v_u8mf4(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vclz_v_u8mf2
// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vclz.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vclz_v_u8mf2(vuint8mf2_t vs2, size_t vl) {
  return __riscv_vclz_v_u8mf2(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vclz_v_u8m1
// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vclz.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vclz_v_u8m1(vuint8m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m1(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vclz_v_u8m2
// CHECK-RV64-SAME: (<vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vclz.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vclz_v_u8m2(vuint8m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m2(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vclz_v_u8m4
// CHECK-RV64-SAME: (<vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vclz.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vclz_v_u8m4(vuint8m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m4(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vclz_v_u8m8
// CHECK-RV64-SAME: (<vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vclz.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vclz_v_u8m8(vuint8m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m8(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vclz_v_u16mf4
// CHECK-RV64-SAME: (<vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vclz.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
//
vuint16mf4_t test_vclz_v_u16mf4(vuint16mf4_t vs2, size_t vl) {
  return __riscv_vclz_v_u16mf4(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vclz_v_u16mf2
// CHECK-RV64-SAME: (<vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vclz.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vclz_v_u16mf2(vuint16mf2_t vs2, size_t vl) {
  return __riscv_vclz_v_u16mf2(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vclz_v_u16m1
// CHECK-RV64-SAME: (<vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vclz.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vclz_v_u16m1(vuint16m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m1(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vclz_v_u16m2
// CHECK-RV64-SAME: (<vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vclz.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vclz_v_u16m2(vuint16m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m2(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vclz_v_u16m4
// CHECK-RV64-SAME: (<vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vclz.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vclz_v_u16m4(vuint16m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m4(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vclz_v_u16m8
// CHECK-RV64-SAME: (<vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vclz.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vclz_v_u16m8(vuint16m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m8(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vclz_v_u32mf2
// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vclz.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vclz_v_u32mf2(vuint32mf2_t vs2, size_t vl) {
  return __riscv_vclz_v_u32mf2(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vclz_v_u32m1
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vclz.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vclz_v_u32m1(vuint32m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m1(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vclz_v_u32m2
// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vclz.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vclz_v_u32m2(vuint32m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m2(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vclz_v_u32m4
// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vclz.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vclz_v_u32m4(vuint32m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m4(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vclz_v_u32m8
// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vclz.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vclz_v_u32m8(vuint32m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m8(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclz_v_u64m1
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclz.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclz_v_u64m1(vuint64m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m1(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclz_v_u64m2
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclz.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclz_v_u64m2(vuint64m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m2(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclz_v_u64m4
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclz.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclz_v_u64m4(vuint64m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m4(vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclz_v_u64m8
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclz.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclz_v_u64m8(vuint64m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m8(vs2, vl);
}
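
// Masked variants follow. Note the trailing policy operand of
// llvm.riscv.vclz.mask: the value 3 selects tail-agnostic, mask-agnostic
// (TAMA), the default policy for the non-policy _m intrinsics.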

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vclz_v_u8mf8_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vclz.mask.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
//
vuint8mf8_t test_vclz_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) {
  return __riscv_vclz_v_u8mf8_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vclz_v_u8mf4_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vclz.mask.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vclz_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) {
  return __riscv_vclz_v_u8mf4_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vclz_v_u8mf2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vclz.mask.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vclz_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) {
  return __riscv_vclz_v_u8mf2_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vclz_v_u8m1_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vclz.mask.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vclz_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m1_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vclz_v_u8m2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vclz.mask.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vclz_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m2_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vclz_v_u8m4_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vclz.mask.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vclz_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m4_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vclz_v_u8m8_m
// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vclz.mask.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vclz_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u8m8_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vclz_v_u16mf4_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vclz.mask.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
//
vuint16mf4_t test_vclz_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) {
  return __riscv_vclz_v_u16mf4_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vclz_v_u16mf2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vclz.mask.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vclz_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) {
  return __riscv_vclz_v_u16mf2_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vclz_v_u16m1_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vclz.mask.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vclz_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m1_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vclz_v_u16m2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vclz.mask.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vclz_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m2_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vclz_v_u16m4_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vclz.mask.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vclz_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m4_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vclz_v_u16m8_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vclz.mask.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vclz_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u16m8_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vclz_v_u32mf2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vclz.mask.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vclz_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) {
  return __riscv_vclz_v_u32mf2_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vclz_v_u32m1_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vclz.mask.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vclz_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m1_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vclz_v_u32m2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vclz.mask.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vclz_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m2_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vclz_v_u32m4_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vclz.mask.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vclz_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m4_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vclz_v_u32m8_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vclz.mask.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vclz_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u32m8_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclz_v_u64m1_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclz.mask.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclz_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m1_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclz_v_u64m2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclz.mask.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclz_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m2_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclz_v_u64m4_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclz.mask.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclz_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m4_m(mask, vs2, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclz_v_u64m8_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclz.mask.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclz_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) {
  return __riscv_vclz_v_u64m8_m(mask, vs2, vl);
}
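
// A minimal usage sketch, not part of the autogenerated assertions above
// (the helper name clz_bytes and the choice of LMUL=8 are illustrative
// assumptions): a strip-mined loop that applies vclz to a byte buffer using
// the base-V vsetvl/vle8/vse8 intrinsics together with the Zvbb vclz
// intrinsic exercised by this test.
void clz_bytes(uint8_t *dst, const uint8_t *src, size_t n) {
  while (n > 0) {
    size_t vl = __riscv_vsetvl_e8m8(n);           // elements handled this pass
    vuint8m8_t v = __riscv_vle8_v_u8m8(src, vl);  // load vl bytes
    vuint8m8_t c = __riscv_vclz_v_u8m8(v, vl);    // per-element leading-zero count
    __riscv_vse8_v_u8m8(dst, c, vl);              // store the counts
    src += vl;
    dst += vl;
    n -= vl;
  }
}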