// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \
// RUN:   -target-feature +zvfh -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
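
// The tests below exercise the overloaded policy variants of __riscv_vlse64
// (strided loads of 64-bit elements): _tu lowers to the plain
// @llvm.riscv.vlse intrinsic with the maskedoff operand as the tail value,
// while the masked _tum, _tumu and _mu variants lower to
// @llvm.riscv.vlse.mask with trailing policy operands 2, 0 and 1
// respectively, as checked by the autogenerated assertions.
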
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vlse64_v_f64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vlse.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vlse64_v_f64m1_tu(vfloat64m1_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vlse64_v_f64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vlse.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vlse64_v_f64m2_tu(vfloat64m2_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vlse64_v_f64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vlse.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vlse64_v_f64m4_tu(vfloat64m4_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vlse64_v_f64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vlse.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vlse64_v_f64m8_tu(vfloat64m8_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_i64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vlse64_v_i64m1_tu(vint64m1_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_i64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vlse64_v_i64m2_tu(vint64m2_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_i64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vlse64_v_i64m4_tu(vint64m4_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_i64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vlse64_v_i64m8_tu(vint64m8_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_u64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vlse64_v_u64m1_tu(vuint64m1_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_u64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vlse64_v_u64m2_tu(vuint64m2_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_u64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vlse64_v_u64m4_tu(vuint64m4_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_u64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vlse64_v_u64m8_tu(vuint64m8_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tu(maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vlse64_v_f64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vlse.mask.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vlse64_v_f64m1_tum(vbool64_t mask, vfloat64m1_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vlse64_v_f64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vlse.mask.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vlse64_v_f64m2_tum(vbool32_t mask, vfloat64m2_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vlse64_v_f64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vlse.mask.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vlse64_v_f64m4_tum(vbool16_t mask, vfloat64m4_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vlse64_v_f64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vlse.mask.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vlse64_v_f64m8_tum(vbool8_t mask, vfloat64m8_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_i64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vlse64_v_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_i64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vlse64_v_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_i64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vlse64_v_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_i64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vlse64_v_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_u64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vlse64_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_u64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vlse64_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_u64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vlse64_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_u64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vlse64_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tum(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vlse64_v_f64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vlse.mask.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vlse64_v_f64m1_tumu(vbool64_t mask, vfloat64m1_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vlse64_v_f64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vlse.mask.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vlse64_v_f64m2_tumu(vbool32_t mask, vfloat64m2_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vlse64_v_f64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vlse.mask.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vlse64_v_f64m4_tumu(vbool16_t mask, vfloat64m4_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vlse64_v_f64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vlse.mask.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vlse64_v_f64m8_tumu(vbool8_t mask, vfloat64m8_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_i64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vlse64_v_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_i64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vlse64_v_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_i64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vlse64_v_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_i64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vlse64_v_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_u64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vlse64_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_u64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vlse64_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_u64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vlse64_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_u64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vlse64_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_tumu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vlse64_v_f64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vlse.mask.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vlse64_v_f64m1_mu(vbool64_t mask, vfloat64m1_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vlse64_v_f64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vlse.mask.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vlse64_v_f64m2_mu(vbool32_t mask, vfloat64m2_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vlse64_v_f64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vlse.mask.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vlse64_v_f64m4_mu(vbool16_t mask, vfloat64m4_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vlse64_v_f64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vlse.mask.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vlse64_v_f64m8_mu(vbool8_t mask, vfloat64m8_t maskedoff, const double *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_i64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vlse64_v_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_i64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vlse64_v_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_i64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vlse64_v_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_i64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vlse64_v_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, const int64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vlse64_v_u64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vlse.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vlse64_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vlse64_v_u64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vlse.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vlse64_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vlse64_v_u64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vlse.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vlse64_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vlse64_v_u64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[BSTRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vlse.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[BSTRIDE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vlse64_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, const uint64_t *base, ptrdiff_t bstride, size_t vl) {
  return __riscv_vlse64_mu(mask, maskedoff, base, bstride, vl);
}