// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
// RUN:   -target-feature +experimental-zvbb \
// RUN:   -target-feature +experimental-zvbc \
// RUN:   -target-feature +experimental-zvkg \
// RUN:   -target-feature +experimental-zvkned \
// RUN:   -target-feature +experimental-zvknhb \
// RUN:   -target-feature +experimental-zvksed \
// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
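
// clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclmul.c
// Overloaded policy variants (_tu, _tum, _tumu, _mu) of the Zvbc carry-less
// multiply intrinsic __riscv_vclmul, in vector-vector (vv) and vector-scalar
// (vx) forms, for unsigned 64-bit element vectors at LMUL m1, m2, m4, and m8.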

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vx_u64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vx_u64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vv_u64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vx_u64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vx_u64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vx_u64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vv_u64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vx_u64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vx_u64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vx_u64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vv_u64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vx_u64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vclmul_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vx_u64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vclmul_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vx_u64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vclmul_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vv_u64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vx_u64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vclmul_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
  return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl);
}