1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple=riscv64 -mattr=+zve64d,+f,+d,+zfh,+zvfh \
3 ; RUN: -verify-machineinstrs < %s | FileCheck %s
5 declare {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr , i64)
6 declare {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i1>, i64, i64)
8 define <vscale x 16 x i16> @test_vlseg2ff_nxv16i16(ptr %base, i64 %vl, ptr %outvl) {
9 ; CHECK-LABEL: test_vlseg2ff_nxv16i16:
10 ; CHECK: # %bb.0: # %entry
11 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
12 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
13 ; CHECK-NEXT: csrr a0, vl
14 ; CHECK-NEXT: sd a0, 0(a2)
17 %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, i64 %vl)
18 %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 1
19 %2 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 2
20 store i64 %2, ptr %outvl
21 ret <vscale x 16 x i16> %1
24 define <vscale x 16 x i16> @test_vlseg2ff_mask_nxv16i16(<vscale x 16 x i16> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask, ptr %outvl) {
25 ; CHECK-LABEL: test_vlseg2ff_mask_nxv16i16:
26 ; CHECK: # %bb.0: # %entry
27 ; CHECK-NEXT: vmv4r.v v4, v8
28 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
29 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
30 ; CHECK-NEXT: csrr a0, vl
31 ; CHECK-NEXT: sd a0, 0(a2)
34 %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)
35 %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 1
36 %2 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 2
37 store i64 %2, ptr %outvl
38 ret <vscale x 16 x i16> %1
41 declare {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg2ff.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr , i64)
42 declare {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i1>, i64, i64)
44 define <vscale x 4 x i32> @test_vlseg2ff_nxv4i32(ptr %base, i64 %vl, ptr %outvl) {
45 ; CHECK-LABEL: test_vlseg2ff_nxv4i32:
46 ; CHECK: # %bb.0: # %entry
47 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
48 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0)
49 ; CHECK-NEXT: csrr a0, vl
50 ; CHECK-NEXT: sd a0, 0(a2)
53 %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg2ff.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i32> undef, ptr %base, i64 %vl)
54 %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 1
55 %2 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 2
56 store i64 %2, ptr %outvl
57 ret <vscale x 4 x i32> %1
60 define <vscale x 4 x i32> @test_vlseg2ff_mask_nxv4i32(<vscale x 4 x i32> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
61 ; CHECK-LABEL: test_vlseg2ff_mask_nxv4i32:
62 ; CHECK: # %bb.0: # %entry
63 ; CHECK-NEXT: vmv2r.v v6, v8
64 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
65 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0), v0.t
66 ; CHECK-NEXT: csrr a0, vl
67 ; CHECK-NEXT: sd a0, 0(a2)
70 %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
71 %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 1
72 %2 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 2
73 store i64 %2, ptr %outvl
74 ret <vscale x 4 x i32> %1
77 declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg3ff.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr , i64)
78 declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg3ff.mask.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i1>, i64, i64)
80 define <vscale x 4 x i32> @test_vlseg3ff_nxv4i32(ptr %base, i64 %vl, ptr %outvl) {
81 ; CHECK-LABEL: test_vlseg3ff_nxv4i32:
82 ; CHECK: # %bb.0: # %entry
83 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
84 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0)
85 ; CHECK-NEXT: csrr a0, vl
86 ; CHECK-NEXT: sd a0, 0(a2)
89 %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg3ff.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i32> undef, <vscale x 4 x i32> undef, ptr %base, i64 %vl)
90 %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 1
91 %2 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 3
92 store i64 %2, ptr %outvl
93 ret <vscale x 4 x i32> %1
96 define <vscale x 4 x i32> @test_vlseg3ff_mask_nxv4i32(<vscale x 4 x i32> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
97 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i32:
98 ; CHECK: # %bb.0: # %entry
99 ; CHECK-NEXT: vmv2r.v v6, v8
100 ; CHECK-NEXT: vmv2r.v v10, v8
101 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
102 ; CHECK-NEXT: vlseg3e32ff.v v6, (a0), v0.t
103 ; CHECK-NEXT: csrr a0, vl
104 ; CHECK-NEXT: sd a0, 0(a2)
107 %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg3ff.mask.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
108 %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 1
109 %2 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 3
110 store i64 %2, ptr %outvl
111 ret <vscale x 4 x i32> %1
114 declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg4ff.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr , i64)
115 declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg4ff.mask.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i1>, i64, i64)
117 define <vscale x 4 x i32> @test_vlseg4ff_nxv4i32(ptr %base, i64 %vl, ptr %outvl) {
118 ; CHECK-LABEL: test_vlseg4ff_nxv4i32:
119 ; CHECK: # %bb.0: # %entry
120 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
121 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0)
122 ; CHECK-NEXT: csrr a0, vl
123 ; CHECK-NEXT: sd a0, 0(a2)
126 %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg4ff.nxv4i32(<vscale x 4 x i32> undef, <vscale x 4 x i32> undef, <vscale x 4 x i32> undef, <vscale x 4 x i32> undef, ptr %base, i64 %vl)
127 %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 1
128 %2 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 4
129 store i64 %2, ptr %outvl
130 ret <vscale x 4 x i32> %1
133 define <vscale x 4 x i32> @test_vlseg4ff_mask_nxv4i32(<vscale x 4 x i32> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
134 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i32:
135 ; CHECK: # %bb.0: # %entry
136 ; CHECK-NEXT: vmv2r.v v6, v8
137 ; CHECK-NEXT: vmv2r.v v10, v8
138 ; CHECK-NEXT: vmv2r.v v12, v8
139 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
140 ; CHECK-NEXT: vlseg4e32ff.v v6, (a0), v0.t
141 ; CHECK-NEXT: csrr a0, vl
142 ; CHECK-NEXT: sd a0, 0(a2)
145 %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} @llvm.riscv.vlseg4ff.mask.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
146 %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 1
147 %2 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i64} %0, 4
148 store i64 %2, ptr %outvl
149 ret <vscale x 4 x i32> %1
152 declare {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg2ff.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr , i64)
153 declare {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i1>, i64, i64)
155 define <vscale x 16 x i8> @test_vlseg2ff_nxv16i8(ptr %base, i64 %vl, ptr %outvl) {
156 ; CHECK-LABEL: test_vlseg2ff_nxv16i8:
157 ; CHECK: # %bb.0: # %entry
158 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
159 ; CHECK-NEXT: vlseg2e8ff.v v6, (a0)
160 ; CHECK-NEXT: csrr a0, vl
161 ; CHECK-NEXT: sd a0, 0(a2)
164 %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg2ff.nxv16i8(<vscale x 16 x i8> undef, <vscale x 16 x i8> undef, ptr %base, i64 %vl)
165 %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 1
166 %2 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 2
167 store i64 %2, ptr %outvl
168 ret <vscale x 16 x i8> %1
171 define <vscale x 16 x i8> @test_vlseg2ff_mask_nxv16i8(<vscale x 16 x i8> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask, ptr %outvl) {
172 ; CHECK-LABEL: test_vlseg2ff_mask_nxv16i8:
173 ; CHECK: # %bb.0: # %entry
174 ; CHECK-NEXT: vmv2r.v v6, v8
175 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
176 ; CHECK-NEXT: vlseg2e8ff.v v6, (a0), v0.t
177 ; CHECK-NEXT: csrr a0, vl
178 ; CHECK-NEXT: sd a0, 0(a2)
181 %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)
182 %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 1
183 %2 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 2
184 store i64 %2, ptr %outvl
185 ret <vscale x 16 x i8> %1
188 declare {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg3ff.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr , i64)
189 declare {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i1>, i64, i64)
191 define <vscale x 16 x i8> @test_vlseg3ff_nxv16i8(ptr %base, i64 %vl, ptr %outvl) {
192 ; CHECK-LABEL: test_vlseg3ff_nxv16i8:
193 ; CHECK: # %bb.0: # %entry
194 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
195 ; CHECK-NEXT: vlseg3e8ff.v v6, (a0)
196 ; CHECK-NEXT: csrr a0, vl
197 ; CHECK-NEXT: sd a0, 0(a2)
200 %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg3ff.nxv16i8(<vscale x 16 x i8> undef, <vscale x 16 x i8> undef, <vscale x 16 x i8> undef, ptr %base, i64 %vl)
201 %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 1
202 %2 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 3
203 store i64 %2, ptr %outvl
204 ret <vscale x 16 x i8> %1
207 define <vscale x 16 x i8> @test_vlseg3ff_mask_nxv16i8(<vscale x 16 x i8> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask, ptr %outvl) {
208 ; CHECK-LABEL: test_vlseg3ff_mask_nxv16i8:
209 ; CHECK: # %bb.0: # %entry
210 ; CHECK-NEXT: vmv2r.v v6, v8
211 ; CHECK-NEXT: vmv2r.v v10, v8
212 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
213 ; CHECK-NEXT: vlseg3e8ff.v v6, (a0), v0.t
214 ; CHECK-NEXT: csrr a0, vl
215 ; CHECK-NEXT: sd a0, 0(a2)
218 %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)
219 %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 1
220 %2 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 3
221 store i64 %2, ptr %outvl
222 ret <vscale x 16 x i8> %1
225 declare {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg4ff.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr , i64)
226 declare {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i1>, i64, i64)
228 define <vscale x 16 x i8> @test_vlseg4ff_nxv16i8(ptr %base, i64 %vl, ptr %outvl) {
229 ; CHECK-LABEL: test_vlseg4ff_nxv16i8:
230 ; CHECK: # %bb.0: # %entry
231 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
232 ; CHECK-NEXT: vlseg4e8ff.v v6, (a0)
233 ; CHECK-NEXT: csrr a0, vl
234 ; CHECK-NEXT: sd a0, 0(a2)
237 %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg4ff.nxv16i8(<vscale x 16 x i8> undef, <vscale x 16 x i8> undef, <vscale x 16 x i8> undef, <vscale x 16 x i8> undef, ptr %base, i64 %vl)
238 %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 1
239 %2 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 4
240 store i64 %2, ptr %outvl
241 ret <vscale x 16 x i8> %1
244 define <vscale x 16 x i8> @test_vlseg4ff_mask_nxv16i8(<vscale x 16 x i8> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask, ptr %outvl) {
245 ; CHECK-LABEL: test_vlseg4ff_mask_nxv16i8:
246 ; CHECK: # %bb.0: # %entry
247 ; CHECK-NEXT: vmv2r.v v6, v8
248 ; CHECK-NEXT: vmv2r.v v10, v8
249 ; CHECK-NEXT: vmv2r.v v12, v8
250 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, mu
251 ; CHECK-NEXT: vlseg4e8ff.v v6, (a0), v0.t
252 ; CHECK-NEXT: csrr a0, vl
253 ; CHECK-NEXT: sd a0, 0(a2)
256 %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)
257 %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 1
258 %2 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i64} %0, 4
259 store i64 %2, ptr %outvl
260 ret <vscale x 16 x i8> %1
263 declare {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg2ff.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>, ptr , i64)
264 declare {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>, ptr, <vscale x 1 x i1>, i64, i64)
266 define <vscale x 1 x i64> @test_vlseg2ff_nxv1i64(ptr %base, i64 %vl, ptr %outvl) {
267 ; CHECK-LABEL: test_vlseg2ff_nxv1i64:
268 ; CHECK: # %bb.0: # %entry
269 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
270 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0)
271 ; CHECK-NEXT: csrr a0, vl
272 ; CHECK-NEXT: sd a0, 0(a2)
275 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg2ff.nxv1i64(<vscale x 1 x i64> undef, <vscale x 1 x i64> undef, ptr %base, i64 %vl)
276 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
277 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 2
278 store i64 %2, ptr %outvl
279 ret <vscale x 1 x i64> %1
282 define <vscale x 1 x i64> @test_vlseg2ff_mask_nxv1i64(<vscale x 1 x i64> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
283 ; CHECK-LABEL: test_vlseg2ff_mask_nxv1i64:
284 ; CHECK: # %bb.0: # %entry
285 ; CHECK-NEXT: vmv1r.v v7, v8
286 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
287 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0), v0.t
288 ; CHECK-NEXT: csrr a0, vl
289 ; CHECK-NEXT: sd a0, 0(a2)
292 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
293 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
294 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 2
295 store i64 %2, ptr %outvl
296 ret <vscale x 1 x i64> %1
299 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg3ff.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr , i64)
300 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr, <vscale x 1 x i1>, i64, i64)
302 define <vscale x 1 x i64> @test_vlseg3ff_nxv1i64(ptr %base, i64 %vl, ptr %outvl) {
303 ; CHECK-LABEL: test_vlseg3ff_nxv1i64:
304 ; CHECK: # %bb.0: # %entry
305 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
306 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0)
307 ; CHECK-NEXT: csrr a0, vl
308 ; CHECK-NEXT: sd a0, 0(a2)
311 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg3ff.nxv1i64(<vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, ptr %base, i64 %vl)
312 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
313 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 3
314 store i64 %2, ptr %outvl
315 ret <vscale x 1 x i64> %1
318 define <vscale x 1 x i64> @test_vlseg3ff_mask_nxv1i64(<vscale x 1 x i64> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
319 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i64:
320 ; CHECK: # %bb.0: # %entry
321 ; CHECK-NEXT: vmv1r.v v7, v8
322 ; CHECK-NEXT: vmv1r.v v9, v8
323 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
324 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0), v0.t
325 ; CHECK-NEXT: csrr a0, vl
326 ; CHECK-NEXT: sd a0, 0(a2)
329 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
330 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
331 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 3
332 store i64 %2, ptr %outvl
333 ret <vscale x 1 x i64> %1
336 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg4ff.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr , i64)
337 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr, <vscale x 1 x i1>, i64, i64)
339 define <vscale x 1 x i64> @test_vlseg4ff_nxv1i64(ptr %base, i64 %vl, ptr %outvl) {
340 ; CHECK-LABEL: test_vlseg4ff_nxv1i64:
341 ; CHECK: # %bb.0: # %entry
342 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
343 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0)
344 ; CHECK-NEXT: csrr a0, vl
345 ; CHECK-NEXT: sd a0, 0(a2)
348 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg4ff.nxv1i64(<vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, ptr %base, i64 %vl)
349 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
350 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 4
351 store i64 %2, ptr %outvl
352 ret <vscale x 1 x i64> %1
355 define <vscale x 1 x i64> @test_vlseg4ff_mask_nxv1i64(<vscale x 1 x i64> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
356 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i64:
357 ; CHECK: # %bb.0: # %entry
358 ; CHECK-NEXT: vmv1r.v v7, v8
359 ; CHECK-NEXT: vmv1r.v v9, v8
360 ; CHECK-NEXT: vmv1r.v v10, v8
361 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
362 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0), v0.t
363 ; CHECK-NEXT: csrr a0, vl
364 ; CHECK-NEXT: sd a0, 0(a2)
367 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
368 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
369 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 4
370 store i64 %2, ptr %outvl
371 ret <vscale x 1 x i64> %1
374 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg5ff.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr , i64)
375 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr, <vscale x 1 x i1>, i64, i64)
377 define <vscale x 1 x i64> @test_vlseg5ff_nxv1i64(ptr %base, i64 %vl, ptr %outvl) {
378 ; CHECK-LABEL: test_vlseg5ff_nxv1i64:
379 ; CHECK: # %bb.0: # %entry
380 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
381 ; CHECK-NEXT: vlseg5e64ff.v v7, (a0)
382 ; CHECK-NEXT: csrr a0, vl
383 ; CHECK-NEXT: sd a0, 0(a2)
386 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg5ff.nxv1i64(<vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, ptr %base, i64 %vl)
387 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
388 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 5
389 store i64 %2, ptr %outvl
390 ret <vscale x 1 x i64> %1
393 define <vscale x 1 x i64> @test_vlseg5ff_mask_nxv1i64(<vscale x 1 x i64> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
394 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i64:
395 ; CHECK: # %bb.0: # %entry
396 ; CHECK-NEXT: vmv1r.v v7, v8
397 ; CHECK-NEXT: vmv1r.v v9, v8
398 ; CHECK-NEXT: vmv1r.v v10, v8
399 ; CHECK-NEXT: vmv1r.v v11, v8
400 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
401 ; CHECK-NEXT: vlseg5e64ff.v v7, (a0), v0.t
402 ; CHECK-NEXT: csrr a0, vl
403 ; CHECK-NEXT: sd a0, 0(a2)
406 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
407 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
408 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 5
409 store i64 %2, ptr %outvl
410 ret <vscale x 1 x i64> %1
413 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg6ff.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr , i64)
414 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr, <vscale x 1 x i1>, i64, i64)
416 define <vscale x 1 x i64> @test_vlseg6ff_nxv1i64(ptr %base, i64 %vl, ptr %outvl) {
417 ; CHECK-LABEL: test_vlseg6ff_nxv1i64:
418 ; CHECK: # %bb.0: # %entry
419 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
420 ; CHECK-NEXT: vlseg6e64ff.v v7, (a0)
421 ; CHECK-NEXT: csrr a0, vl
422 ; CHECK-NEXT: sd a0, 0(a2)
425 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg6ff.nxv1i64(<vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, ptr %base, i64 %vl)
426 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
427 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 6
428 store i64 %2, ptr %outvl
429 ret <vscale x 1 x i64> %1
432 define <vscale x 1 x i64> @test_vlseg6ff_mask_nxv1i64(<vscale x 1 x i64> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
433 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i64:
434 ; CHECK: # %bb.0: # %entry
435 ; CHECK-NEXT: vmv1r.v v7, v8
436 ; CHECK-NEXT: vmv1r.v v9, v8
437 ; CHECK-NEXT: vmv1r.v v10, v8
438 ; CHECK-NEXT: vmv1r.v v11, v8
439 ; CHECK-NEXT: vmv1r.v v12, v8
440 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
441 ; CHECK-NEXT: vlseg6e64ff.v v7, (a0), v0.t
442 ; CHECK-NEXT: csrr a0, vl
443 ; CHECK-NEXT: sd a0, 0(a2)
446 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
447 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
448 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 6
449 store i64 %2, ptr %outvl
450 ret <vscale x 1 x i64> %1
453 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg7ff.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr , i64)
454 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr, <vscale x 1 x i1>, i64, i64)
456 define <vscale x 1 x i64> @test_vlseg7ff_nxv1i64(ptr %base, i64 %vl, ptr %outvl) {
457 ; CHECK-LABEL: test_vlseg7ff_nxv1i64:
458 ; CHECK: # %bb.0: # %entry
459 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
460 ; CHECK-NEXT: vlseg7e64ff.v v7, (a0)
461 ; CHECK-NEXT: csrr a0, vl
462 ; CHECK-NEXT: sd a0, 0(a2)
465 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg7ff.nxv1i64(<vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, ptr %base, i64 %vl)
466 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
467 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 7
468 store i64 %2, ptr %outvl
469 ret <vscale x 1 x i64> %1
472 define <vscale x 1 x i64> @test_vlseg7ff_mask_nxv1i64(<vscale x 1 x i64> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
473 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i64:
474 ; CHECK: # %bb.0: # %entry
475 ; CHECK-NEXT: vmv1r.v v7, v8
476 ; CHECK-NEXT: vmv1r.v v9, v8
477 ; CHECK-NEXT: vmv1r.v v10, v8
478 ; CHECK-NEXT: vmv1r.v v11, v8
479 ; CHECK-NEXT: vmv1r.v v12, v8
480 ; CHECK-NEXT: vmv1r.v v13, v8
481 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
482 ; CHECK-NEXT: vlseg7e64ff.v v7, (a0), v0.t
483 ; CHECK-NEXT: csrr a0, vl
484 ; CHECK-NEXT: sd a0, 0(a2)
487 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
488 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
489 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 7
490 store i64 %2, ptr %outvl
491 ret <vscale x 1 x i64> %1
494 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg8ff.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr , i64)
495 declare {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, ptr, <vscale x 1 x i1>, i64, i64)
497 define <vscale x 1 x i64> @test_vlseg8ff_nxv1i64(ptr %base, i64 %vl, ptr %outvl) {
498 ; CHECK-LABEL: test_vlseg8ff_nxv1i64:
499 ; CHECK: # %bb.0: # %entry
500 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
501 ; CHECK-NEXT: vlseg8e64ff.v v7, (a0)
502 ; CHECK-NEXT: csrr a0, vl
503 ; CHECK-NEXT: sd a0, 0(a2)
506 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg8ff.nxv1i64(<vscale x 1 x i64> undef, <vscale x 1 x i64> undef ,<vscale x 1 x i64> undef ,<vscale x 1 x i64> undef, <vscale x 1 x i64> undef ,<vscale x 1 x i64> undef, <vscale x 1 x i64> undef, <vscale x 1 x i64> undef, ptr %base, i64 %vl)
507 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
508 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 8
509 store i64 %2, ptr %outvl
510 ret <vscale x 1 x i64> %1
513 define <vscale x 1 x i64> @test_vlseg8ff_mask_nxv1i64(<vscale x 1 x i64> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
514 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i64:
515 ; CHECK: # %bb.0: # %entry
516 ; CHECK-NEXT: vmv1r.v v7, v8
517 ; CHECK-NEXT: vmv1r.v v9, v8
518 ; CHECK-NEXT: vmv1r.v v10, v8
519 ; CHECK-NEXT: vmv1r.v v11, v8
520 ; CHECK-NEXT: vmv1r.v v12, v8
521 ; CHECK-NEXT: vmv1r.v v13, v8
522 ; CHECK-NEXT: vmv1r.v v14, v8
523 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
524 ; CHECK-NEXT: vlseg8e64ff.v v7, (a0), v0.t
525 ; CHECK-NEXT: csrr a0, vl
526 ; CHECK-NEXT: sd a0, 0(a2)
529 %0 = tail call {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
530 %1 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 1
531 %2 = extractvalue {<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64} %0, 8
532 store i64 %2, ptr %outvl
533 ret <vscale x 1 x i64> %1
536 declare {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg2ff.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr , i64)
537 declare {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i1>, i64, i64)
539 define <vscale x 1 x i32> @test_vlseg2ff_nxv1i32(ptr %base, i64 %vl, ptr %outvl) {
540 ; CHECK-LABEL: test_vlseg2ff_nxv1i32:
541 ; CHECK: # %bb.0: # %entry
542 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
543 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0)
544 ; CHECK-NEXT: csrr a0, vl
545 ; CHECK-NEXT: sd a0, 0(a2)
548 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg2ff.nxv1i32(<vscale x 1 x i32> undef, <vscale x 1 x i32> undef, ptr %base, i64 %vl)
549 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
550 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 2
551 store i64 %2, ptr %outvl
552 ret <vscale x 1 x i32> %1
555 define <vscale x 1 x i32> @test_vlseg2ff_mask_nxv1i32(<vscale x 1 x i32> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
556 ; CHECK-LABEL: test_vlseg2ff_mask_nxv1i32:
557 ; CHECK: # %bb.0: # %entry
558 ; CHECK-NEXT: vmv1r.v v7, v8
559 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
560 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t
561 ; CHECK-NEXT: csrr a0, vl
562 ; CHECK-NEXT: sd a0, 0(a2)
565 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
566 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
567 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 2
568 store i64 %2, ptr %outvl
569 ret <vscale x 1 x i32> %1
572 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg3ff.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr , i64)
573 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i1>, i64, i64)
575 define <vscale x 1 x i32> @test_vlseg3ff_nxv1i32(ptr %base, i64 %vl, ptr %outvl) {
576 ; CHECK-LABEL: test_vlseg3ff_nxv1i32:
577 ; CHECK: # %bb.0: # %entry
578 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
579 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0)
580 ; CHECK-NEXT: csrr a0, vl
581 ; CHECK-NEXT: sd a0, 0(a2)
584 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg3ff.nxv1i32(<vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, ptr %base, i64 %vl)
585 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
586 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 3
587 store i64 %2, ptr %outvl
588 ret <vscale x 1 x i32> %1
591 define <vscale x 1 x i32> @test_vlseg3ff_mask_nxv1i32(<vscale x 1 x i32> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
592 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1i32:
593 ; CHECK: # %bb.0: # %entry
594 ; CHECK-NEXT: vmv1r.v v7, v8
595 ; CHECK-NEXT: vmv1r.v v9, v8
596 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
597 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t
598 ; CHECK-NEXT: csrr a0, vl
599 ; CHECK-NEXT: sd a0, 0(a2)
602 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
603 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
604 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 3
605 store i64 %2, ptr %outvl
606 ret <vscale x 1 x i32> %1
609 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg4ff.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr , i64)
610 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i1>, i64, i64)
612 define <vscale x 1 x i32> @test_vlseg4ff_nxv1i32(ptr %base, i64 %vl, ptr %outvl) {
613 ; CHECK-LABEL: test_vlseg4ff_nxv1i32:
614 ; CHECK: # %bb.0: # %entry
615 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
616 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0)
617 ; CHECK-NEXT: csrr a0, vl
618 ; CHECK-NEXT: sd a0, 0(a2)
621 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg4ff.nxv1i32(<vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, ptr %base, i64 %vl)
622 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
623 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 4
624 store i64 %2, ptr %outvl
625 ret <vscale x 1 x i32> %1
628 define <vscale x 1 x i32> @test_vlseg4ff_mask_nxv1i32(<vscale x 1 x i32> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
629 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1i32:
630 ; CHECK: # %bb.0: # %entry
631 ; CHECK-NEXT: vmv1r.v v7, v8
632 ; CHECK-NEXT: vmv1r.v v9, v8
633 ; CHECK-NEXT: vmv1r.v v10, v8
634 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
635 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t
636 ; CHECK-NEXT: csrr a0, vl
637 ; CHECK-NEXT: sd a0, 0(a2)
640 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
641 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
642 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 4
643 store i64 %2, ptr %outvl
644 ret <vscale x 1 x i32> %1
647 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg5ff.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr , i64)
648 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i1>, i64, i64)
650 define <vscale x 1 x i32> @test_vlseg5ff_nxv1i32(ptr %base, i64 %vl, ptr %outvl) {
651 ; CHECK-LABEL: test_vlseg5ff_nxv1i32:
652 ; CHECK: # %bb.0: # %entry
653 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
654 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0)
655 ; CHECK-NEXT: csrr a0, vl
656 ; CHECK-NEXT: sd a0, 0(a2)
659 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg5ff.nxv1i32(<vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, ptr %base, i64 %vl)
660 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
661 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 5
662 store i64 %2, ptr %outvl
663 ret <vscale x 1 x i32> %1
666 define <vscale x 1 x i32> @test_vlseg5ff_mask_nxv1i32(<vscale x 1 x i32> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
667 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1i32:
668 ; CHECK: # %bb.0: # %entry
669 ; CHECK-NEXT: vmv1r.v v7, v8
670 ; CHECK-NEXT: vmv1r.v v9, v8
671 ; CHECK-NEXT: vmv1r.v v10, v8
672 ; CHECK-NEXT: vmv1r.v v11, v8
673 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
674 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t
675 ; CHECK-NEXT: csrr a0, vl
676 ; CHECK-NEXT: sd a0, 0(a2)
679 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
680 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
681 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 5
682 store i64 %2, ptr %outvl
683 ret <vscale x 1 x i32> %1
686 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg6ff.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr , i64)
687 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i1>, i64, i64)
689 define <vscale x 1 x i32> @test_vlseg6ff_nxv1i32(ptr %base, i64 %vl, ptr %outvl) {
690 ; CHECK-LABEL: test_vlseg6ff_nxv1i32:
691 ; CHECK: # %bb.0: # %entry
692 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
693 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0)
694 ; CHECK-NEXT: csrr a0, vl
695 ; CHECK-NEXT: sd a0, 0(a2)
698 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg6ff.nxv1i32(<vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, ptr %base, i64 %vl)
699 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
700 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 6
701 store i64 %2, ptr %outvl
702 ret <vscale x 1 x i32> %1
705 define <vscale x 1 x i32> @test_vlseg6ff_mask_nxv1i32(<vscale x 1 x i32> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
706 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i32:
707 ; CHECK: # %bb.0: # %entry
708 ; CHECK-NEXT: vmv1r.v v7, v8
709 ; CHECK-NEXT: vmv1r.v v9, v8
710 ; CHECK-NEXT: vmv1r.v v10, v8
711 ; CHECK-NEXT: vmv1r.v v11, v8
712 ; CHECK-NEXT: vmv1r.v v12, v8
713 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
714 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t
715 ; CHECK-NEXT: csrr a0, vl
716 ; CHECK-NEXT: sd a0, 0(a2)
719 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
720 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
721 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 6
722 store i64 %2, ptr %outvl
723 ret <vscale x 1 x i32> %1
726 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg7ff.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr , i64)
727 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i1>, i64, i64)
729 define <vscale x 1 x i32> @test_vlseg7ff_nxv1i32(ptr %base, i64 %vl, ptr %outvl) {
730 ; CHECK-LABEL: test_vlseg7ff_nxv1i32:
731 ; CHECK: # %bb.0: # %entry
732 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
733 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0)
734 ; CHECK-NEXT: csrr a0, vl
735 ; CHECK-NEXT: sd a0, 0(a2)
738 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg7ff.nxv1i32(<vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, ptr %base, i64 %vl)
739 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
740 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 7
741 store i64 %2, ptr %outvl
742 ret <vscale x 1 x i32> %1
745 define <vscale x 1 x i32> @test_vlseg7ff_mask_nxv1i32(<vscale x 1 x i32> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
746 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i32:
747 ; CHECK: # %bb.0: # %entry
748 ; CHECK-NEXT: vmv1r.v v7, v8
749 ; CHECK-NEXT: vmv1r.v v9, v8
750 ; CHECK-NEXT: vmv1r.v v10, v8
751 ; CHECK-NEXT: vmv1r.v v11, v8
752 ; CHECK-NEXT: vmv1r.v v12, v8
753 ; CHECK-NEXT: vmv1r.v v13, v8
754 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
755 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t
756 ; CHECK-NEXT: csrr a0, vl
757 ; CHECK-NEXT: sd a0, 0(a2)
760 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
761 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
762 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 7
763 store i64 %2, ptr %outvl
764 ret <vscale x 1 x i32> %1
767 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg8ff.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr , i64)
768 declare {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i1>, i64, i64)
770 define <vscale x 1 x i32> @test_vlseg8ff_nxv1i32(ptr %base, i64 %vl, ptr %outvl) {
771 ; CHECK-LABEL: test_vlseg8ff_nxv1i32:
772 ; CHECK: # %bb.0: # %entry
773 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
774 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0)
775 ; CHECK-NEXT: csrr a0, vl
776 ; CHECK-NEXT: sd a0, 0(a2)
779 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg8ff.nxv1i32(<vscale x 1 x i32> undef, <vscale x 1 x i32> undef ,<vscale x 1 x i32> undef ,<vscale x 1 x i32> undef, <vscale x 1 x i32> undef ,<vscale x 1 x i32> undef, <vscale x 1 x i32> undef, <vscale x 1 x i32> undef, ptr %base, i64 %vl)
780 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
781 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 8
782 store i64 %2, ptr %outvl
783 ret <vscale x 1 x i32> %1
786 define <vscale x 1 x i32> @test_vlseg8ff_mask_nxv1i32(<vscale x 1 x i32> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
787 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i32:
788 ; CHECK: # %bb.0: # %entry
789 ; CHECK-NEXT: vmv1r.v v7, v8
790 ; CHECK-NEXT: vmv1r.v v9, v8
791 ; CHECK-NEXT: vmv1r.v v10, v8
792 ; CHECK-NEXT: vmv1r.v v11, v8
793 ; CHECK-NEXT: vmv1r.v v12, v8
794 ; CHECK-NEXT: vmv1r.v v13, v8
795 ; CHECK-NEXT: vmv1r.v v14, v8
796 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
797 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t
798 ; CHECK-NEXT: csrr a0, vl
799 ; CHECK-NEXT: sd a0, 0(a2)
802 %0 = tail call {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
803 %1 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 1
804 %2 = extractvalue {<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, i64} %0, 8
805 store i64 %2, ptr %outvl
806 ret <vscale x 1 x i32> %1
809 declare {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg2ff.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr , i64)
810 declare {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i1>, i64, i64)
812 define <vscale x 8 x i16> @test_vlseg2ff_nxv8i16(ptr %base, i64 %vl, ptr %outvl) {
813 ; CHECK-LABEL: test_vlseg2ff_nxv8i16:
814 ; CHECK: # %bb.0: # %entry
815 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
816 ; CHECK-NEXT: vlseg2e16ff.v v6, (a0)
817 ; CHECK-NEXT: csrr a0, vl
818 ; CHECK-NEXT: sd a0, 0(a2)
821 %0 = tail call {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg2ff.nxv8i16(<vscale x 8 x i16> undef, <vscale x 8 x i16> undef, ptr %base, i64 %vl)
822 %1 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 1
823 %2 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 2
824 store i64 %2, ptr %outvl
825 ret <vscale x 8 x i16> %1
828 define <vscale x 8 x i16> @test_vlseg2ff_mask_nxv8i16(<vscale x 8 x i16> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
829 ; CHECK-LABEL: test_vlseg2ff_mask_nxv8i16:
830 ; CHECK: # %bb.0: # %entry
831 ; CHECK-NEXT: vmv2r.v v6, v8
832 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
833 ; CHECK-NEXT: vlseg2e16ff.v v6, (a0), v0.t
834 ; CHECK-NEXT: csrr a0, vl
835 ; CHECK-NEXT: sd a0, 0(a2)
838 %0 = tail call {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
839 %1 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 1
840 %2 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 2
841 store i64 %2, ptr %outvl
842 ret <vscale x 8 x i16> %1
845 declare {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg3ff.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr , i64)
846 declare {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i1>, i64, i64)
848 define <vscale x 8 x i16> @test_vlseg3ff_nxv8i16(ptr %base, i64 %vl, ptr %outvl) {
849 ; CHECK-LABEL: test_vlseg3ff_nxv8i16:
850 ; CHECK: # %bb.0: # %entry
851 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
852 ; CHECK-NEXT: vlseg3e16ff.v v6, (a0)
853 ; CHECK-NEXT: csrr a0, vl
854 ; CHECK-NEXT: sd a0, 0(a2)
857 %0 = tail call {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg3ff.nxv8i16(<vscale x 8 x i16> undef, <vscale x 8 x i16> undef, <vscale x 8 x i16> undef, ptr %base, i64 %vl)
858 %1 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 1
859 %2 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 3
860 store i64 %2, ptr %outvl
861 ret <vscale x 8 x i16> %1
864 define <vscale x 8 x i16> @test_vlseg3ff_mask_nxv8i16(<vscale x 8 x i16> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
865 ; CHECK-LABEL: test_vlseg3ff_mask_nxv8i16:
866 ; CHECK: # %bb.0: # %entry
867 ; CHECK-NEXT: vmv2r.v v6, v8
868 ; CHECK-NEXT: vmv2r.v v10, v8
869 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
870 ; CHECK-NEXT: vlseg3e16ff.v v6, (a0), v0.t
871 ; CHECK-NEXT: csrr a0, vl
872 ; CHECK-NEXT: sd a0, 0(a2)
875 %0 = tail call {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
876 %1 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 1
877 %2 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 3
878 store i64 %2, ptr %outvl
879 ret <vscale x 8 x i16> %1
882 declare {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg4ff.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr , i64)
883 declare {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i1>, i64, i64)
885 define <vscale x 8 x i16> @test_vlseg4ff_nxv8i16(ptr %base, i64 %vl, ptr %outvl) {
886 ; CHECK-LABEL: test_vlseg4ff_nxv8i16:
887 ; CHECK: # %bb.0: # %entry
888 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
889 ; CHECK-NEXT: vlseg4e16ff.v v6, (a0)
890 ; CHECK-NEXT: csrr a0, vl
891 ; CHECK-NEXT: sd a0, 0(a2)
894 %0 = tail call {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg4ff.nxv8i16(<vscale x 8 x i16> undef, <vscale x 8 x i16> undef, <vscale x 8 x i16> undef, <vscale x 8 x i16> undef, ptr %base, i64 %vl)
895 %1 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 1
896 %2 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 4
897 store i64 %2, ptr %outvl
898 ret <vscale x 8 x i16> %1
901 define <vscale x 8 x i16> @test_vlseg4ff_mask_nxv8i16(<vscale x 8 x i16> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
902 ; CHECK-LABEL: test_vlseg4ff_mask_nxv8i16:
903 ; CHECK: # %bb.0: # %entry
904 ; CHECK-NEXT: vmv2r.v v6, v8
905 ; CHECK-NEXT: vmv2r.v v10, v8
906 ; CHECK-NEXT: vmv2r.v v12, v8
907 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
908 ; CHECK-NEXT: vlseg4e16ff.v v6, (a0), v0.t
909 ; CHECK-NEXT: csrr a0, vl
910 ; CHECK-NEXT: sd a0, 0(a2)
913 %0 = tail call {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
914 %1 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 1
915 %2 = extractvalue {<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, i64} %0, 4
916 store i64 %2, ptr %outvl
917 ret <vscale x 8 x i16> %1
920 declare {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg2ff.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr , i64)
921 declare {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i1>, i64, i64)
923 define <vscale x 4 x i8> @test_vlseg2ff_nxv4i8(ptr %base, i64 %vl, ptr %outvl) {
924 ; CHECK-LABEL: test_vlseg2ff_nxv4i8:
925 ; CHECK: # %bb.0: # %entry
926 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
927 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0)
928 ; CHECK-NEXT: csrr a0, vl
929 ; CHECK-NEXT: sd a0, 0(a2)
932 %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg2ff.nxv4i8(<vscale x 4 x i8> undef, <vscale x 4 x i8> undef, ptr %base, i64 %vl)
933 %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
934 %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 2
935 store i64 %2, ptr %outvl
936 ret <vscale x 4 x i8> %1
939 define <vscale x 4 x i8> @test_vlseg2ff_mask_nxv4i8(<vscale x 4 x i8> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
940 ; CHECK-LABEL: test_vlseg2ff_mask_nxv4i8:
941 ; CHECK: # %bb.0: # %entry
942 ; CHECK-NEXT: vmv1r.v v7, v8
943 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
944 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t
945 ; CHECK-NEXT: csrr a0, vl
946 ; CHECK-NEXT: sd a0, 0(a2)
949 %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
950 %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
951 %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 2
952 store i64 %2, ptr %outvl
953 ret <vscale x 4 x i8> %1
956 declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg3ff.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr , i64)
957 declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i1>, i64, i64)
959 define <vscale x 4 x i8> @test_vlseg3ff_nxv4i8(ptr %base, i64 %vl, ptr %outvl) {
960 ; CHECK-LABEL: test_vlseg3ff_nxv4i8:
961 ; CHECK: # %bb.0: # %entry
962 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
963 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0)
964 ; CHECK-NEXT: csrr a0, vl
965 ; CHECK-NEXT: sd a0, 0(a2)
968 %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg3ff.nxv4i8(<vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, ptr %base, i64 %vl)
969 %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
970 %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 3
971 store i64 %2, ptr %outvl
972 ret <vscale x 4 x i8> %1
975 define <vscale x 4 x i8> @test_vlseg3ff_mask_nxv4i8(<vscale x 4 x i8> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
976 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4i8:
977 ; CHECK: # %bb.0: # %entry
978 ; CHECK-NEXT: vmv1r.v v7, v8
979 ; CHECK-NEXT: vmv1r.v v9, v8
980 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
981 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t
982 ; CHECK-NEXT: csrr a0, vl
983 ; CHECK-NEXT: sd a0, 0(a2)
986 %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
987 %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
988 %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 3
989 store i64 %2, ptr %outvl
990 ret <vscale x 4 x i8> %1
993 declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg4ff.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr , i64)
994 declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i1>, i64, i64)
996 define <vscale x 4 x i8> @test_vlseg4ff_nxv4i8(ptr %base, i64 %vl, ptr %outvl) {
997 ; CHECK-LABEL: test_vlseg4ff_nxv4i8:
998 ; CHECK: # %bb.0: # %entry
999 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
1000 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0)
1001 ; CHECK-NEXT: csrr a0, vl
1002 ; CHECK-NEXT: sd a0, 0(a2)
1005 %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg4ff.nxv4i8(<vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, ptr %base, i64 %vl)
1006 %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
1007 %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 4
1008 store i64 %2, ptr %outvl
1009 ret <vscale x 4 x i8> %1
1012 define <vscale x 4 x i8> @test_vlseg4ff_mask_nxv4i8(<vscale x 4 x i8> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
1013 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4i8:
1014 ; CHECK: # %bb.0: # %entry
1015 ; CHECK-NEXT: vmv1r.v v7, v8
1016 ; CHECK-NEXT: vmv1r.v v9, v8
1017 ; CHECK-NEXT: vmv1r.v v10, v8
1018 ; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, mu
1019 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t
1020 ; CHECK-NEXT: csrr a0, vl
1021 ; CHECK-NEXT: sd a0, 0(a2)
1024 %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
1025 %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
1026 %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 4
1027 store i64 %2, ptr %outvl
1028 ret <vscale x 4 x i8> %1
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg5ff.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr , i64)
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i1>, i64, i64)

; Unmasked 5-field fault-only-first segment load of nxv4i8: returns struct
; field 1 and stores the post-fault vl (field 5) to %outvl.
define <vscale x 4 x i8> @test_vlseg5ff_nxv4i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT:    vlseg5e8ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg5ff.nxv4i8(<vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
; Masked variant: %val seeds all five passthru fields, mask in v0; the trailing
; policy operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 4 x i8> @test_vlseg5ff_mask_nxv4i8(<vscale x 4 x i8> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_mask_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT:    vlseg5e8ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg6ff.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr , i64)
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i1>, i64, i64)

; Unmasked 6-field fault-only-first segment load of nxv4i8: returns struct
; field 1 and stores the post-fault vl (field 6) to %outvl.
define <vscale x 4 x i8> @test_vlseg6ff_nxv4i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg6ff_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT:    vlseg6e8ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg6ff.nxv4i8(<vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 6
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
; Masked variant: %val seeds all six passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 4 x i8> @test_vlseg6ff_mask_nxv4i8(<vscale x 4 x i8> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg6ff_mask_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT:    vlseg6e8ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 6
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg7ff.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr , i64)
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i1>, i64, i64)

; Unmasked 7-field fault-only-first segment load of nxv4i8: returns struct
; field 1 and stores the post-fault vl (field 7) to %outvl.
define <vscale x 4 x i8> @test_vlseg7ff_nxv4i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg7ff_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT:    vlseg7e8ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg7ff.nxv4i8(<vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 7
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
; Masked variant: %val seeds all seven passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 4 x i8> @test_vlseg7ff_mask_nxv4i8(<vscale x 4 x i8> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg7ff_mask_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT:    vlseg7e8ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 7
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg8ff.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr , i64)
declare {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i1>, i64, i64)

; Unmasked 8-field fault-only-first segment load of nxv4i8: returns struct
; field 1 and stores the post-fault vl (field 8) to %outvl.
define <vscale x 4 x i8> @test_vlseg8ff_nxv4i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg8ff_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT:    vlseg8e8ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg8ff.nxv4i8(<vscale x 4 x i8> undef, <vscale x 4 x i8> undef ,<vscale x 4 x i8> undef ,<vscale x 4 x i8> undef, <vscale x 4 x i8> undef ,<vscale x 4 x i8> undef, <vscale x 4 x i8> undef, <vscale x 4 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 8
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
; Masked variant: %val seeds all eight passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 4 x i8> @test_vlseg8ff_mask_nxv4i8(<vscale x 4 x i8> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg8ff_mask_nxv4i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, mu
; CHECK-NEXT:    vlseg8e8ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, i64} %0, 8
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i8> %1
}
declare {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg2ff.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr , i64)
declare {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i1>, i64, i64)

; Unmasked 2-field fault-only-first segment load of nxv1i16: returns struct
; field 1 and stores the post-fault vl (field 2) to %outvl.
define <vscale x 1 x i16> @test_vlseg2ff_nxv1i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT:    vlseg2e16ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg2ff.nxv1i16(<vscale x 1 x i16> undef, <vscale x 1 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
; Masked variant: %val seeds both passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 1 x i16> @test_vlseg2ff_mask_nxv1i16(<vscale x 1 x i16> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_mask_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT:    vlseg2e16ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg3ff.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr , i64)
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i1>, i64, i64)

; Unmasked 3-field fault-only-first segment load of nxv1i16: returns struct
; field 1 and stores the post-fault vl (field 3) to %outvl.
define <vscale x 1 x i16> @test_vlseg3ff_nxv1i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT:    vlseg3e16ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg3ff.nxv1i16(<vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
; Masked variant: %val seeds all three passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 1 x i16> @test_vlseg3ff_mask_nxv1i16(<vscale x 1 x i16> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_mask_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT:    vlseg3e16ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg4ff.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr , i64)
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i1>, i64, i64)

; Unmasked 4-field fault-only-first segment load of nxv1i16: returns struct
; field 1 and stores the post-fault vl (field 4) to %outvl.
define <vscale x 1 x i16> @test_vlseg4ff_nxv1i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT:    vlseg4e16ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg4ff.nxv1i16(<vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
; Masked variant: %val seeds all four passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 1 x i16> @test_vlseg4ff_mask_nxv1i16(<vscale x 1 x i16> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_mask_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT:    vlseg4e16ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg5ff.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr , i64)
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i1>, i64, i64)

; Unmasked 5-field fault-only-first segment load of nxv1i16: returns struct
; field 1 and stores the post-fault vl (field 5) to %outvl.
define <vscale x 1 x i16> @test_vlseg5ff_nxv1i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT:    vlseg5e16ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg5ff.nxv1i16(<vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
; Masked variant: %val seeds all five passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 1 x i16> @test_vlseg5ff_mask_nxv1i16(<vscale x 1 x i16> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_mask_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT:    vlseg5e16ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg6ff.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr , i64)
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i1>, i64, i64)

; Unmasked 6-field fault-only-first segment load of nxv1i16: returns struct
; field 1 and stores the post-fault vl (field 6) to %outvl.
define <vscale x 1 x i16> @test_vlseg6ff_nxv1i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg6ff_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT:    vlseg6e16ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg6ff.nxv1i16(<vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 6
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
; Masked variant: %val seeds all six passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 1 x i16> @test_vlseg6ff_mask_nxv1i16(<vscale x 1 x i16> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg6ff_mask_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT:    vlseg6e16ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 6
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg7ff.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr , i64)
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i1>, i64, i64)

; Unmasked 7-field fault-only-first segment load of nxv1i16: returns struct
; field 1 and stores the post-fault vl (field 7) to %outvl.
define <vscale x 1 x i16> @test_vlseg7ff_nxv1i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg7ff_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT:    vlseg7e16ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg7ff.nxv1i16(<vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 7
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
; Masked variant: %val seeds all seven passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 1 x i16> @test_vlseg7ff_mask_nxv1i16(<vscale x 1 x i16> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg7ff_mask_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT:    vlseg7e16ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 7
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg8ff.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr , i64)
declare {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i1>, i64, i64)

; Unmasked 8-field fault-only-first segment load of nxv1i16: returns struct
; field 1 and stores the post-fault vl (field 8) to %outvl.
define <vscale x 1 x i16> @test_vlseg8ff_nxv1i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg8ff_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT:    vlseg8e16ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg8ff.nxv1i16(<vscale x 1 x i16> undef, <vscale x 1 x i16> undef ,<vscale x 1 x i16> undef ,<vscale x 1 x i16> undef, <vscale x 1 x i16> undef ,<vscale x 1 x i16> undef, <vscale x 1 x i16> undef, <vscale x 1 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 8
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
; Masked variant: %val seeds all eight passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 1 x i16> @test_vlseg8ff_mask_nxv1i16(<vscale x 1 x i16> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg8ff_mask_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, mu
; CHECK-NEXT:    vlseg8e16ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, i64} %0, 8
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i16> %1
}
declare {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg2ff.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr , i64)
declare {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i1>, i64, i64)

; Unmasked 2-field fault-only-first segment load of nxv2i32: returns struct
; field 1 and stores the post-fault vl (field 2) to %outvl.
define <vscale x 2 x i32> @test_vlseg2ff_nxv2i32(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_nxv2i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT:    vlseg2e32ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg2ff.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
  %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 2 x i32> %1
}
; Masked variant: %val seeds both passthru fields, mask in v0; policy
; operand 1 selects the (ta, mu) vtype seen in the CHECK line above.
define <vscale x 2 x i32> @test_vlseg2ff_mask_nxv2i32(<vscale x 2 x i32> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_mask_nxv2i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v7, v8
; CHECK-NEXT:    vsetvli zero, a1, e32, m1, ta, mu
; CHECK-NEXT:    vlseg2e32ff.v v7, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
  %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 2 x i32> %1
}
declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg3ff.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr , i64)
declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i1>, i64, i64)

; Unmasked 3-field fault-only-first segment load of nxv2i32: returns struct
; field 1 and stores the post-fault vl (field 3) to %outvl.
define <vscale x 2 x i32> @test_vlseg3ff_nxv2i32(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_nxv2i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT:    vlseg3e32ff.v v7, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg3ff.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
  %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 2 x i32> %1
}
; Masked (v0.t) fault-only-first 3-field segment load of nxv2i32; %val seeds
; every field, new vl stored to %outvl.
1521 define <vscale x 2 x i32> @test_vlseg3ff_mask_nxv2i32(<vscale x 2 x i32> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
1522 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i32:
1523 ; CHECK: # %bb.0: # %entry
1524 ; CHECK-NEXT: vmv1r.v v7, v8
1525 ; CHECK-NEXT: vmv1r.v v9, v8
1526 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
1527 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t
1528 ; CHECK-NEXT: csrr a0, vl
1529 ; CHECK-NEXT: sd a0, 0(a2)
1532 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
1533 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1534 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 3
1535 store i64 %2, ptr %outvl
1536 ret <vscale x 2 x i32> %1
1539 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg4ff.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr , i64)
1540 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked fault-only-first 4-field segment load of nxv2i32 with undef
; passthru; the post-fault vl (struct field 4) is stored to %outvl.
1542 define <vscale x 2 x i32> @test_vlseg4ff_nxv2i32(ptr %base, i64 %vl, ptr %outvl) {
1543 ; CHECK-LABEL: test_vlseg4ff_nxv2i32:
1544 ; CHECK: # %bb.0: # %entry
1545 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1546 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0)
1547 ; CHECK-NEXT: csrr a0, vl
1548 ; CHECK-NEXT: sd a0, 0(a2)
1551 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg4ff.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, ptr %base, i64 %vl)
1552 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1553 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 4
1554 store i64 %2, ptr %outvl
1555 ret <vscale x 2 x i32> %1
; Masked (v0.t) fault-only-first 4-field segment load of nxv2i32; %val seeds
; every field, new vl stored to %outvl.
1558 define <vscale x 2 x i32> @test_vlseg4ff_mask_nxv2i32(<vscale x 2 x i32> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
1559 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i32:
1560 ; CHECK: # %bb.0: # %entry
1561 ; CHECK-NEXT: vmv1r.v v7, v8
1562 ; CHECK-NEXT: vmv1r.v v9, v8
1563 ; CHECK-NEXT: vmv1r.v v10, v8
1564 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
1565 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t
1566 ; CHECK-NEXT: csrr a0, vl
1567 ; CHECK-NEXT: sd a0, 0(a2)
1570 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
1571 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1572 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 4
1573 store i64 %2, ptr %outvl
1574 ret <vscale x 2 x i32> %1
1577 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg5ff.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr , i64)
1578 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg5ff.mask.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked fault-only-first 5-field segment load of nxv2i32 with undef
; passthru; the post-fault vl (struct field 5) is stored to %outvl.
1580 define <vscale x 2 x i32> @test_vlseg5ff_nxv2i32(ptr %base, i64 %vl, ptr %outvl) {
1581 ; CHECK-LABEL: test_vlseg5ff_nxv2i32:
1582 ; CHECK: # %bb.0: # %entry
1583 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1584 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0)
1585 ; CHECK-NEXT: csrr a0, vl
1586 ; CHECK-NEXT: sd a0, 0(a2)
1589 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg5ff.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, ptr %base, i64 %vl)
1590 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1591 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 5
1592 store i64 %2, ptr %outvl
1593 ret <vscale x 2 x i32> %1
; Masked (v0.t) fault-only-first 5-field segment load of nxv2i32; %val seeds
; every field, new vl stored to %outvl.
1596 define <vscale x 2 x i32> @test_vlseg5ff_mask_nxv2i32(<vscale x 2 x i32> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
1597 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i32:
1598 ; CHECK: # %bb.0: # %entry
1599 ; CHECK-NEXT: vmv1r.v v7, v8
1600 ; CHECK-NEXT: vmv1r.v v9, v8
1601 ; CHECK-NEXT: vmv1r.v v10, v8
1602 ; CHECK-NEXT: vmv1r.v v11, v8
1603 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
1604 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t
1605 ; CHECK-NEXT: csrr a0, vl
1606 ; CHECK-NEXT: sd a0, 0(a2)
1609 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg5ff.mask.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
1610 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1611 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 5
1612 store i64 %2, ptr %outvl
1613 ret <vscale x 2 x i32> %1
1616 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg6ff.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr , i64)
1617 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg6ff.mask.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked fault-only-first 6-field segment load of nxv2i32 with undef
; passthru; the post-fault vl (struct field 6) is stored to %outvl.
1619 define <vscale x 2 x i32> @test_vlseg6ff_nxv2i32(ptr %base, i64 %vl, ptr %outvl) {
1620 ; CHECK-LABEL: test_vlseg6ff_nxv2i32:
1621 ; CHECK: # %bb.0: # %entry
1622 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1623 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0)
1624 ; CHECK-NEXT: csrr a0, vl
1625 ; CHECK-NEXT: sd a0, 0(a2)
1628 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg6ff.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, ptr %base, i64 %vl)
1629 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1630 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 6
1631 store i64 %2, ptr %outvl
1632 ret <vscale x 2 x i32> %1
; Masked (v0.t) fault-only-first 6-field segment load of nxv2i32; %val seeds
; every field, new vl stored to %outvl.
1635 define <vscale x 2 x i32> @test_vlseg6ff_mask_nxv2i32(<vscale x 2 x i32> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
1636 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i32:
1637 ; CHECK: # %bb.0: # %entry
1638 ; CHECK-NEXT: vmv1r.v v7, v8
1639 ; CHECK-NEXT: vmv1r.v v9, v8
1640 ; CHECK-NEXT: vmv1r.v v10, v8
1641 ; CHECK-NEXT: vmv1r.v v11, v8
1642 ; CHECK-NEXT: vmv1r.v v12, v8
1643 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
1644 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t
1645 ; CHECK-NEXT: csrr a0, vl
1646 ; CHECK-NEXT: sd a0, 0(a2)
1649 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg6ff.mask.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
1650 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1651 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 6
1652 store i64 %2, ptr %outvl
1653 ret <vscale x 2 x i32> %1
1656 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg7ff.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr , i64)
1657 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg7ff.mask.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked fault-only-first 7-field segment load of nxv2i32 with undef
; passthru; the post-fault vl (struct field 7) is stored to %outvl.
1659 define <vscale x 2 x i32> @test_vlseg7ff_nxv2i32(ptr %base, i64 %vl, ptr %outvl) {
1660 ; CHECK-LABEL: test_vlseg7ff_nxv2i32:
1661 ; CHECK: # %bb.0: # %entry
1662 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1663 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0)
1664 ; CHECK-NEXT: csrr a0, vl
1665 ; CHECK-NEXT: sd a0, 0(a2)
1668 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg7ff.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, ptr %base, i64 %vl)
1669 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1670 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 7
1671 store i64 %2, ptr %outvl
1672 ret <vscale x 2 x i32> %1
; Masked (v0.t) fault-only-first 7-field segment load of nxv2i32; %val seeds
; every field, new vl stored to %outvl.
1675 define <vscale x 2 x i32> @test_vlseg7ff_mask_nxv2i32(<vscale x 2 x i32> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
1676 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i32:
1677 ; CHECK: # %bb.0: # %entry
1678 ; CHECK-NEXT: vmv1r.v v7, v8
1679 ; CHECK-NEXT: vmv1r.v v9, v8
1680 ; CHECK-NEXT: vmv1r.v v10, v8
1681 ; CHECK-NEXT: vmv1r.v v11, v8
1682 ; CHECK-NEXT: vmv1r.v v12, v8
1683 ; CHECK-NEXT: vmv1r.v v13, v8
1684 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
1685 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t
1686 ; CHECK-NEXT: csrr a0, vl
1687 ; CHECK-NEXT: sd a0, 0(a2)
1690 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg7ff.mask.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
1691 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1692 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 7
1693 store i64 %2, ptr %outvl
1694 ret <vscale x 2 x i32> %1
1697 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg8ff.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr , i64)
1698 declare {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg8ff.mask.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked fault-only-first 8-field segment load of nxv2i32 with undef
; passthru; the post-fault vl (struct field 8) is stored to %outvl.
1700 define <vscale x 2 x i32> @test_vlseg8ff_nxv2i32(ptr %base, i64 %vl, ptr %outvl) {
1701 ; CHECK-LABEL: test_vlseg8ff_nxv2i32:
1702 ; CHECK: # %bb.0: # %entry
1703 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1704 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0)
1705 ; CHECK-NEXT: csrr a0, vl
1706 ; CHECK-NEXT: sd a0, 0(a2)
1709 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg8ff.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> undef ,<vscale x 2 x i32> undef ,<vscale x 2 x i32> undef, <vscale x 2 x i32> undef ,<vscale x 2 x i32> undef, <vscale x 2 x i32> undef, <vscale x 2 x i32> undef, ptr %base, i64 %vl)
1710 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1711 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 8
1712 store i64 %2, ptr %outvl
1713 ret <vscale x 2 x i32> %1
; Masked (v0.t) fault-only-first 8-field segment load of nxv2i32; %val seeds
; every field, new vl stored to %outvl.
1716 define <vscale x 2 x i32> @test_vlseg8ff_mask_nxv2i32(<vscale x 2 x i32> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
1717 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i32:
1718 ; CHECK: # %bb.0: # %entry
1719 ; CHECK-NEXT: vmv1r.v v7, v8
1720 ; CHECK-NEXT: vmv1r.v v9, v8
1721 ; CHECK-NEXT: vmv1r.v v10, v8
1722 ; CHECK-NEXT: vmv1r.v v11, v8
1723 ; CHECK-NEXT: vmv1r.v v12, v8
1724 ; CHECK-NEXT: vmv1r.v v13, v8
1725 ; CHECK-NEXT: vmv1r.v v14, v8
1726 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
1727 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t
1728 ; CHECK-NEXT: csrr a0, vl
1729 ; CHECK-NEXT: sd a0, 0(a2)
1732 %0 = tail call {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} @llvm.riscv.vlseg8ff.mask.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
1733 %1 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 1
1734 %2 = extractvalue {<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, i64} %0, 8
1735 store i64 %2, ptr %outvl
1736 ret <vscale x 2 x i32> %1
1739 declare {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg2ff.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr , i64)
1740 declare {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked fault-only-first 2-field segment load of nxv8i8 with undef
; passthru; the post-fault vl (struct field 2) is stored to %outvl.
1742 define <vscale x 8 x i8> @test_vlseg2ff_nxv8i8(ptr %base, i64 %vl, ptr %outvl) {
1743 ; CHECK-LABEL: test_vlseg2ff_nxv8i8:
1744 ; CHECK: # %bb.0: # %entry
1745 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
1746 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0)
1747 ; CHECK-NEXT: csrr a0, vl
1748 ; CHECK-NEXT: sd a0, 0(a2)
1751 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg2ff.nxv8i8(<vscale x 8 x i8> undef, <vscale x 8 x i8> undef, ptr %base, i64 %vl)
1752 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1753 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 2
1754 store i64 %2, ptr %outvl
1755 ret <vscale x 8 x i8> %1
; Masked (v0.t) fault-only-first 2-field segment load of nxv8i8; %val seeds
; every field, new vl stored to %outvl.
1758 define <vscale x 8 x i8> @test_vlseg2ff_mask_nxv8i8(<vscale x 8 x i8> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
1759 ; CHECK-LABEL: test_vlseg2ff_mask_nxv8i8:
1760 ; CHECK: # %bb.0: # %entry
1761 ; CHECK-NEXT: vmv1r.v v7, v8
1762 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
1763 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t
1764 ; CHECK-NEXT: csrr a0, vl
1765 ; CHECK-NEXT: sd a0, 0(a2)
1768 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
1769 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1770 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 2
1771 store i64 %2, ptr %outvl
1772 ret <vscale x 8 x i8> %1
1775 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg3ff.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr , i64)
1776 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked fault-only-first 3-field segment load of nxv8i8 with undef
; passthru; the post-fault vl (struct field 3) is stored to %outvl.
1778 define <vscale x 8 x i8> @test_vlseg3ff_nxv8i8(ptr %base, i64 %vl, ptr %outvl) {
1779 ; CHECK-LABEL: test_vlseg3ff_nxv8i8:
1780 ; CHECK: # %bb.0: # %entry
1781 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
1782 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0)
1783 ; CHECK-NEXT: csrr a0, vl
1784 ; CHECK-NEXT: sd a0, 0(a2)
1787 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg3ff.nxv8i8(<vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, ptr %base, i64 %vl)
1788 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1789 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 3
1790 store i64 %2, ptr %outvl
1791 ret <vscale x 8 x i8> %1
; Masked (v0.t) fault-only-first 3-field segment load of nxv8i8; %val seeds
; every field, new vl stored to %outvl.
1794 define <vscale x 8 x i8> @test_vlseg3ff_mask_nxv8i8(<vscale x 8 x i8> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
1795 ; CHECK-LABEL: test_vlseg3ff_mask_nxv8i8:
1796 ; CHECK: # %bb.0: # %entry
1797 ; CHECK-NEXT: vmv1r.v v7, v8
1798 ; CHECK-NEXT: vmv1r.v v9, v8
1799 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
1800 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t
1801 ; CHECK-NEXT: csrr a0, vl
1802 ; CHECK-NEXT: sd a0, 0(a2)
1805 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
1806 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1807 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 3
1808 store i64 %2, ptr %outvl
1809 ret <vscale x 8 x i8> %1
1812 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg4ff.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr , i64)
1813 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked fault-only-first 4-field segment load of nxv8i8 with undef
; passthru; the post-fault vl (struct field 4) is stored to %outvl.
1815 define <vscale x 8 x i8> @test_vlseg4ff_nxv8i8(ptr %base, i64 %vl, ptr %outvl) {
1816 ; CHECK-LABEL: test_vlseg4ff_nxv8i8:
1817 ; CHECK: # %bb.0: # %entry
1818 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
1819 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0)
1820 ; CHECK-NEXT: csrr a0, vl
1821 ; CHECK-NEXT: sd a0, 0(a2)
1824 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg4ff.nxv8i8(<vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, ptr %base, i64 %vl)
1825 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1826 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 4
1827 store i64 %2, ptr %outvl
1828 ret <vscale x 8 x i8> %1
; Masked (v0.t) fault-only-first 4-field segment load of nxv8i8; %val seeds
; every field, new vl stored to %outvl.
1831 define <vscale x 8 x i8> @test_vlseg4ff_mask_nxv8i8(<vscale x 8 x i8> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
1832 ; CHECK-LABEL: test_vlseg4ff_mask_nxv8i8:
1833 ; CHECK: # %bb.0: # %entry
1834 ; CHECK-NEXT: vmv1r.v v7, v8
1835 ; CHECK-NEXT: vmv1r.v v9, v8
1836 ; CHECK-NEXT: vmv1r.v v10, v8
1837 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
1838 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t
1839 ; CHECK-NEXT: csrr a0, vl
1840 ; CHECK-NEXT: sd a0, 0(a2)
1843 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
1844 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1845 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 4
1846 store i64 %2, ptr %outvl
1847 ret <vscale x 8 x i8> %1
1850 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg5ff.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr , i64)
1851 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked fault-only-first 5-field segment load of nxv8i8 with undef
; passthru; the post-fault vl (struct field 5) is stored to %outvl.
1853 define <vscale x 8 x i8> @test_vlseg5ff_nxv8i8(ptr %base, i64 %vl, ptr %outvl) {
1854 ; CHECK-LABEL: test_vlseg5ff_nxv8i8:
1855 ; CHECK: # %bb.0: # %entry
1856 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
1857 ; CHECK-NEXT: vlseg5e8ff.v v7, (a0)
1858 ; CHECK-NEXT: csrr a0, vl
1859 ; CHECK-NEXT: sd a0, 0(a2)
1862 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg5ff.nxv8i8(<vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, ptr %base, i64 %vl)
1863 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1864 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 5
1865 store i64 %2, ptr %outvl
1866 ret <vscale x 8 x i8> %1
; Masked (v0.t) fault-only-first 5-field segment load of nxv8i8; %val seeds
; every field, new vl stored to %outvl.
1869 define <vscale x 8 x i8> @test_vlseg5ff_mask_nxv8i8(<vscale x 8 x i8> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
1870 ; CHECK-LABEL: test_vlseg5ff_mask_nxv8i8:
1871 ; CHECK: # %bb.0: # %entry
1872 ; CHECK-NEXT: vmv1r.v v7, v8
1873 ; CHECK-NEXT: vmv1r.v v9, v8
1874 ; CHECK-NEXT: vmv1r.v v10, v8
1875 ; CHECK-NEXT: vmv1r.v v11, v8
1876 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
1877 ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t
1878 ; CHECK-NEXT: csrr a0, vl
1879 ; CHECK-NEXT: sd a0, 0(a2)
1882 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
1883 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1884 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 5
1885 store i64 %2, ptr %outvl
1886 ret <vscale x 8 x i8> %1
1889 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg6ff.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr , i64)
1890 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked fault-only-first 6-field segment load of nxv8i8 with undef
; passthru; the post-fault vl (struct field 6) is stored to %outvl.
1892 define <vscale x 8 x i8> @test_vlseg6ff_nxv8i8(ptr %base, i64 %vl, ptr %outvl) {
1893 ; CHECK-LABEL: test_vlseg6ff_nxv8i8:
1894 ; CHECK: # %bb.0: # %entry
1895 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
1896 ; CHECK-NEXT: vlseg6e8ff.v v7, (a0)
1897 ; CHECK-NEXT: csrr a0, vl
1898 ; CHECK-NEXT: sd a0, 0(a2)
1901 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg6ff.nxv8i8(<vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, ptr %base, i64 %vl)
1902 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1903 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 6
1904 store i64 %2, ptr %outvl
1905 ret <vscale x 8 x i8> %1
; Masked (v0.t) fault-only-first 6-field segment load of nxv8i8; %val seeds
; every field, new vl stored to %outvl.
1908 define <vscale x 8 x i8> @test_vlseg6ff_mask_nxv8i8(<vscale x 8 x i8> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
1909 ; CHECK-LABEL: test_vlseg6ff_mask_nxv8i8:
1910 ; CHECK: # %bb.0: # %entry
1911 ; CHECK-NEXT: vmv1r.v v7, v8
1912 ; CHECK-NEXT: vmv1r.v v9, v8
1913 ; CHECK-NEXT: vmv1r.v v10, v8
1914 ; CHECK-NEXT: vmv1r.v v11, v8
1915 ; CHECK-NEXT: vmv1r.v v12, v8
1916 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
1917 ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t
1918 ; CHECK-NEXT: csrr a0, vl
1919 ; CHECK-NEXT: sd a0, 0(a2)
1922 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
1923 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1924 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 6
1925 store i64 %2, ptr %outvl
1926 ret <vscale x 8 x i8> %1
1929 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg7ff.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr , i64)
1930 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked fault-only-first 7-field segment load of nxv8i8 with undef
; passthru; the post-fault vl (struct field 7) is stored to %outvl.
1932 define <vscale x 8 x i8> @test_vlseg7ff_nxv8i8(ptr %base, i64 %vl, ptr %outvl) {
1933 ; CHECK-LABEL: test_vlseg7ff_nxv8i8:
1934 ; CHECK: # %bb.0: # %entry
1935 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
1936 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0)
1937 ; CHECK-NEXT: csrr a0, vl
1938 ; CHECK-NEXT: sd a0, 0(a2)
1941 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg7ff.nxv8i8(<vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, ptr %base, i64 %vl)
1942 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1943 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 7
1944 store i64 %2, ptr %outvl
1945 ret <vscale x 8 x i8> %1
; Masked (v0.t) fault-only-first 7-field segment load of nxv8i8; %val seeds
; every field, new vl stored to %outvl.
1948 define <vscale x 8 x i8> @test_vlseg7ff_mask_nxv8i8(<vscale x 8 x i8> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
1949 ; CHECK-LABEL: test_vlseg7ff_mask_nxv8i8:
1950 ; CHECK: # %bb.0: # %entry
1951 ; CHECK-NEXT: vmv1r.v v7, v8
1952 ; CHECK-NEXT: vmv1r.v v9, v8
1953 ; CHECK-NEXT: vmv1r.v v10, v8
1954 ; CHECK-NEXT: vmv1r.v v11, v8
1955 ; CHECK-NEXT: vmv1r.v v12, v8
1956 ; CHECK-NEXT: vmv1r.v v13, v8
1957 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
1958 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t
1959 ; CHECK-NEXT: csrr a0, vl
1960 ; CHECK-NEXT: sd a0, 0(a2)
1963 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
1964 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1965 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 7
1966 store i64 %2, ptr %outvl
1967 ret <vscale x 8 x i8> %1
1970 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg8ff.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr , i64)
1971 declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked fault-only-first 8-field segment load of nxv8i8 with undef
; passthru; the post-fault vl (struct field 8) is stored to %outvl.
1973 define <vscale x 8 x i8> @test_vlseg8ff_nxv8i8(ptr %base, i64 %vl, ptr %outvl) {
1974 ; CHECK-LABEL: test_vlseg8ff_nxv8i8:
1975 ; CHECK: # %bb.0: # %entry
1976 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
1977 ; CHECK-NEXT: vlseg8e8ff.v v7, (a0)
1978 ; CHECK-NEXT: csrr a0, vl
1979 ; CHECK-NEXT: sd a0, 0(a2)
1982 %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg8ff.nxv8i8(<vscale x 8 x i8> undef, <vscale x 8 x i8> undef ,<vscale x 8 x i8> undef ,<vscale x 8 x i8> undef, <vscale x 8 x i8> undef ,<vscale x 8 x i8> undef, <vscale x 8 x i8> undef, <vscale x 8 x i8> undef, ptr %base, i64 %vl)
1983 %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
1984 %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 8
1985 store i64 %2, ptr %outvl
1986 ret <vscale x 8 x i8> %1
; Masked fault-only-first 8-field segment load of nxv8i8: %val is the passthru
; for every field (hence the vmv1r.v copies into the v7-v14 group), mask in v0,
; policy operand 1; returns field 1 and stores the returned vl to %outvl.
define <vscale x 8 x i8> @test_vlseg8ff_mask_nxv8i8(<vscale x 8 x i8> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg8ff_mask_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, mu
; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i64} %0, 8
  store i64 %2, ptr %outvl
  ret <vscale x 8 x i8> %1
2012 declare {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} @llvm.riscv.vlseg2ff.nxv4i64(<vscale x 4 x i64>,<vscale x 4 x i64>, ptr , i64)
2013 declare {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i64(<vscale x 4 x i64>,<vscale x 4 x i64>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 2-field segment load of nxv4i64 (LMUL=4): passthru
; operands are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i64> @test_vlseg2ff_nxv4i64(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
; CHECK-NEXT: vlseg2e64ff.v v4, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} @llvm.riscv.vlseg2ff.nxv4i64(<vscale x 4 x i64> undef, <vscale x 4 x i64> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i64> %1
; Masked fault-only-first 2-field segment load of nxv4i64: %val as passthru for
; both fields (vmv4r.v copy for the m4 group), mask in v0, policy operand 1;
; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i64> @test_vlseg2ff_mask_nxv4i64(<vscale x 4 x i64> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_mask_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv4r.v v4, v8
; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
; CHECK-NEXT: vlseg2e64ff.v v4, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i64(<vscale x 4 x i64> %val,<vscale x 4 x i64> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i64>,<vscale x 4 x i64>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i64> %1
2048 declare {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg2ff.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr , i64)
2049 declare {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 2-field segment load of nxv4i16: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg2ff_nxv4i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg2ff.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
; Masked fault-only-first 2-field segment load of nxv4i16: %val as passthru for
; both fields, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg2ff_mask_nxv4i16(<vscale x 4 x i16> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
2084 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg3ff.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr , i64)
2085 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 3-field segment load of nxv4i16: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg3ff_nxv4i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg3ff.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
; Masked fault-only-first 3-field segment load of nxv4i16: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg3ff_mask_nxv4i16(<vscale x 4 x i16> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
2121 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg4ff.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr , i64)
2122 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 4-field segment load of nxv4i16: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg4ff_nxv4i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg4ff.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
; Masked fault-only-first 4-field segment load of nxv4i16: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg4ff_mask_nxv4i16(<vscale x 4 x i16> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
2159 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg5ff.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr , i64)
2160 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg5ff.mask.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 5-field segment load of nxv4i16: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg5ff_nxv4i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg5ff.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
; Masked fault-only-first 5-field segment load of nxv4i16: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg5ff_mask_nxv4i16(<vscale x 4 x i16> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg5ff.mask.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
2198 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg6ff.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr , i64)
2199 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg6ff.mask.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 6-field segment load of nxv4i16: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg6ff_nxv4i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg6ff_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg6ff.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 6
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
; Masked fault-only-first 6-field segment load of nxv4i16: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg6ff_mask_nxv4i16(<vscale x 4 x i16> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg6ff_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg6ff.mask.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 6
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
2238 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg7ff.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr , i64)
2239 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg7ff.mask.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 7-field segment load of nxv4i16: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg7ff_nxv4i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg7ff_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg7ff.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 7
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
; Masked fault-only-first 7-field segment load of nxv4i16: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg7ff_mask_nxv4i16(<vscale x 4 x i16> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg7ff_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg7ff.mask.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 7
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
2279 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg8ff.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr , i64)
2280 declare {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg8ff.mask.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked fault-only-first 8-field segment load of nxv4i16: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg8ff_nxv4i16(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg8ff_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg8ff.nxv4i16(<vscale x 4 x i16> undef, <vscale x 4 x i16> undef ,<vscale x 4 x i16> undef ,<vscale x 4 x i16> undef, <vscale x 4 x i16> undef ,<vscale x 4 x i16> undef, <vscale x 4 x i16> undef, <vscale x 4 x i16> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 8
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
; Masked fault-only-first 8-field segment load of nxv4i16: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 4 x i16> @test_vlseg8ff_mask_nxv4i16(<vscale x 4 x i16> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg8ff_mask_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} @llvm.riscv.vlseg8ff.mask.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, i64} %0, 8
  store i64 %2, ptr %outvl
  ret <vscale x 4 x i16> %1
2321 declare {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg2ff.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr , i64)
2322 declare {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked fault-only-first 2-field segment load of nxv1i8 (LMUL=1/8): passthru
; operands are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg2ff_nxv1i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT: vlseg2e8ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg2ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
; Masked fault-only-first 2-field segment load of nxv1i8: %val as passthru for
; both fields, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg2ff_mask_nxv1i8(<vscale x 1 x i8> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg2ff_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 2
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
2357 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg3ff.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr , i64)
2358 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked fault-only-first 3-field segment load of nxv1i8: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg3ff_nxv1i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT: vlseg3e8ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg3ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
; Masked fault-only-first 3-field segment load of nxv1i8: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg3ff_mask_nxv1i8(<vscale x 1 x i8> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
2394 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg4ff.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr , i64)
2395 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked fault-only-first 4-field segment load of nxv1i8: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg4ff_nxv1i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT: vlseg4e8ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg4ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
; Masked fault-only-first 4-field segment load of nxv1i8: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg4ff_mask_nxv1i8(<vscale x 1 x i8> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
2432 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg5ff.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr , i64)
2433 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked fault-only-first 5-field segment load of nxv1i8: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg5ff_nxv1i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT: vlseg5e8ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg5ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
; Masked fault-only-first 5-field segment load of nxv1i8: %val as passthru for
; every field, mask in v0, policy operand 1; returns field 1 and stores the
; returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg5ff_mask_nxv1i8(<vscale x 1 x i8> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg5ff_mask_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v7, v8
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 5
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
2471 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg6ff.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr , i64)
2472 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked fault-only-first 6-field segment load of nxv1i8: passthru operands
; are undef; returns field 1 and stores the returned vl to %outvl.
define <vscale x 1 x i8> @test_vlseg6ff_nxv1i8(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg6ff_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT: vlseg6e8ff.v v7, (a0)
; CHECK-NEXT: csrr a0, vl
; CHECK-NEXT: sd a0, 0(a2)
  %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg6ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
  %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 6
  store i64 %2, ptr %outvl
  ret <vscale x 1 x i8> %1
; Masked vlseg6ff of nxv1i8: %val tied into all 6 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2490 define <vscale x 1 x i8> @test_vlseg6ff_mask_nxv1i8(<vscale x 1 x i8> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
2491 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1i8:
2492 ; CHECK: # %bb.0: # %entry
2493 ; CHECK-NEXT: vmv1r.v v7, v8
2494 ; CHECK-NEXT: vmv1r.v v9, v8
2495 ; CHECK-NEXT: vmv1r.v v10, v8
2496 ; CHECK-NEXT: vmv1r.v v11, v8
2497 ; CHECK-NEXT: vmv1r.v v12, v8
2498 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
2499 ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t
2500 ; CHECK-NEXT: csrr a0, vl
2501 ; CHECK-NEXT: sd a0, 0(a2)
2504 %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
2505 %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
2506 %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 6
2507 store i64 %2, ptr %outvl
2508 ret <vscale x 1 x i8> %1
2511 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg7ff.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr , i64)
2512 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked vlseg7ff of nxv1i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2514 define <vscale x 1 x i8> @test_vlseg7ff_nxv1i8(ptr %base, i64 %vl, ptr %outvl) {
2515 ; CHECK-LABEL: test_vlseg7ff_nxv1i8:
2516 ; CHECK: # %bb.0: # %entry
2517 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
2518 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0)
2519 ; CHECK-NEXT: csrr a0, vl
2520 ; CHECK-NEXT: sd a0, 0(a2)
2523 %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg7ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, ptr %base, i64 %vl)
2524 %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
2525 %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 7
2526 store i64 %2, ptr %outvl
2527 ret <vscale x 1 x i8> %1
; Masked vlseg7ff of nxv1i8: %val tied into all 7 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2530 define <vscale x 1 x i8> @test_vlseg7ff_mask_nxv1i8(<vscale x 1 x i8> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
2531 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1i8:
2532 ; CHECK: # %bb.0: # %entry
2533 ; CHECK-NEXT: vmv1r.v v7, v8
2534 ; CHECK-NEXT: vmv1r.v v9, v8
2535 ; CHECK-NEXT: vmv1r.v v10, v8
2536 ; CHECK-NEXT: vmv1r.v v11, v8
2537 ; CHECK-NEXT: vmv1r.v v12, v8
2538 ; CHECK-NEXT: vmv1r.v v13, v8
2539 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
2540 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t
2541 ; CHECK-NEXT: csrr a0, vl
2542 ; CHECK-NEXT: sd a0, 0(a2)
2545 %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
2546 %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
2547 %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 7
2548 store i64 %2, ptr %outvl
2549 ret <vscale x 1 x i8> %1
2552 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg8ff.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr , i64)
2553 declare {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked vlseg8ff of nxv1i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2555 define <vscale x 1 x i8> @test_vlseg8ff_nxv1i8(ptr %base, i64 %vl, ptr %outvl) {
2556 ; CHECK-LABEL: test_vlseg8ff_nxv1i8:
2557 ; CHECK: # %bb.0: # %entry
2558 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
2559 ; CHECK-NEXT: vlseg8e8ff.v v7, (a0)
2560 ; CHECK-NEXT: csrr a0, vl
2561 ; CHECK-NEXT: sd a0, 0(a2)
2564 %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg8ff.nxv1i8(<vscale x 1 x i8> undef, <vscale x 1 x i8> undef ,<vscale x 1 x i8> undef ,<vscale x 1 x i8> undef, <vscale x 1 x i8> undef ,<vscale x 1 x i8> undef, <vscale x 1 x i8> undef, <vscale x 1 x i8> undef, ptr %base, i64 %vl)
2565 %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
2566 %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 8
2567 store i64 %2, ptr %outvl
2568 ret <vscale x 1 x i8> %1
; Masked vlseg8ff of nxv1i8: %val tied into all 8 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2571 define <vscale x 1 x i8> @test_vlseg8ff_mask_nxv1i8(<vscale x 1 x i8> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
2572 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1i8:
2573 ; CHECK: # %bb.0: # %entry
2574 ; CHECK-NEXT: vmv1r.v v7, v8
2575 ; CHECK-NEXT: vmv1r.v v9, v8
2576 ; CHECK-NEXT: vmv1r.v v10, v8
2577 ; CHECK-NEXT: vmv1r.v v11, v8
2578 ; CHECK-NEXT: vmv1r.v v12, v8
2579 ; CHECK-NEXT: vmv1r.v v13, v8
2580 ; CHECK-NEXT: vmv1r.v v14, v8
2581 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, mu
2582 ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t
2583 ; CHECK-NEXT: csrr a0, vl
2584 ; CHECK-NEXT: sd a0, 0(a2)
2587 %0 = tail call {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
2588 %1 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 1
2589 %2 = extractvalue {<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, i64} %0, 8
2590 store i64 %2, ptr %outvl
2591 ret <vscale x 1 x i8> %1
2594 declare {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg2ff.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr , i64)
2595 declare {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg2ff of nxv2i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2597 define <vscale x 2 x i8> @test_vlseg2ff_nxv2i8(ptr %base, i64 %vl, ptr %outvl) {
2598 ; CHECK-LABEL: test_vlseg2ff_nxv2i8:
2599 ; CHECK: # %bb.0: # %entry
2600 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
2601 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0)
2602 ; CHECK-NEXT: csrr a0, vl
2603 ; CHECK-NEXT: sd a0, 0(a2)
2606 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg2ff.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, ptr %base, i64 %vl)
2607 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2608 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 2
2609 store i64 %2, ptr %outvl
2610 ret <vscale x 2 x i8> %1
; Masked vlseg2ff of nxv2i8: %val tied into both destination fields (vmv1r copy), policy operand i64 1; trimmed vl stored to %outvl.
2613 define <vscale x 2 x i8> @test_vlseg2ff_mask_nxv2i8(<vscale x 2 x i8> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2614 ; CHECK-LABEL: test_vlseg2ff_mask_nxv2i8:
2615 ; CHECK: # %bb.0: # %entry
2616 ; CHECK-NEXT: vmv1r.v v7, v8
2617 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
2618 ; CHECK-NEXT: vlseg2e8ff.v v7, (a0), v0.t
2619 ; CHECK-NEXT: csrr a0, vl
2620 ; CHECK-NEXT: sd a0, 0(a2)
2623 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2624 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2625 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 2
2626 store i64 %2, ptr %outvl
2627 ret <vscale x 2 x i8> %1
2630 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg3ff.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr , i64)
2631 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg3ff of nxv2i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2633 define <vscale x 2 x i8> @test_vlseg3ff_nxv2i8(ptr %base, i64 %vl, ptr %outvl) {
2634 ; CHECK-LABEL: test_vlseg3ff_nxv2i8:
2635 ; CHECK: # %bb.0: # %entry
2636 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
2637 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0)
2638 ; CHECK-NEXT: csrr a0, vl
2639 ; CHECK-NEXT: sd a0, 0(a2)
2642 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg3ff.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, ptr %base, i64 %vl)
2643 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2644 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 3
2645 store i64 %2, ptr %outvl
2646 ret <vscale x 2 x i8> %1
; Masked vlseg3ff of nxv2i8: %val tied into all 3 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2649 define <vscale x 2 x i8> @test_vlseg3ff_mask_nxv2i8(<vscale x 2 x i8> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2650 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i8:
2651 ; CHECK: # %bb.0: # %entry
2652 ; CHECK-NEXT: vmv1r.v v7, v8
2653 ; CHECK-NEXT: vmv1r.v v9, v8
2654 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
2655 ; CHECK-NEXT: vlseg3e8ff.v v7, (a0), v0.t
2656 ; CHECK-NEXT: csrr a0, vl
2657 ; CHECK-NEXT: sd a0, 0(a2)
2660 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2661 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2662 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 3
2663 store i64 %2, ptr %outvl
2664 ret <vscale x 2 x i8> %1
2667 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg4ff.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr , i64)
2668 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg4ff of nxv2i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2670 define <vscale x 2 x i8> @test_vlseg4ff_nxv2i8(ptr %base, i64 %vl, ptr %outvl) {
2671 ; CHECK-LABEL: test_vlseg4ff_nxv2i8:
2672 ; CHECK: # %bb.0: # %entry
2673 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
2674 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0)
2675 ; CHECK-NEXT: csrr a0, vl
2676 ; CHECK-NEXT: sd a0, 0(a2)
2679 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg4ff.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, ptr %base, i64 %vl)
2680 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2681 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 4
2682 store i64 %2, ptr %outvl
2683 ret <vscale x 2 x i8> %1
; Masked vlseg4ff of nxv2i8: %val tied into all 4 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2686 define <vscale x 2 x i8> @test_vlseg4ff_mask_nxv2i8(<vscale x 2 x i8> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2687 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i8:
2688 ; CHECK: # %bb.0: # %entry
2689 ; CHECK-NEXT: vmv1r.v v7, v8
2690 ; CHECK-NEXT: vmv1r.v v9, v8
2691 ; CHECK-NEXT: vmv1r.v v10, v8
2692 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
2693 ; CHECK-NEXT: vlseg4e8ff.v v7, (a0), v0.t
2694 ; CHECK-NEXT: csrr a0, vl
2695 ; CHECK-NEXT: sd a0, 0(a2)
2698 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2699 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2700 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 4
2701 store i64 %2, ptr %outvl
2702 ret <vscale x 2 x i8> %1
2705 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg5ff.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr , i64)
2706 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg5ff of nxv2i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2708 define <vscale x 2 x i8> @test_vlseg5ff_nxv2i8(ptr %base, i64 %vl, ptr %outvl) {
2709 ; CHECK-LABEL: test_vlseg5ff_nxv2i8:
2710 ; CHECK: # %bb.0: # %entry
2711 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
2712 ; CHECK-NEXT: vlseg5e8ff.v v7, (a0)
2713 ; CHECK-NEXT: csrr a0, vl
2714 ; CHECK-NEXT: sd a0, 0(a2)
2717 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg5ff.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, ptr %base, i64 %vl)
2718 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2719 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 5
2720 store i64 %2, ptr %outvl
2721 ret <vscale x 2 x i8> %1
; Masked vlseg5ff of nxv2i8: %val tied into all 5 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2724 define <vscale x 2 x i8> @test_vlseg5ff_mask_nxv2i8(<vscale x 2 x i8> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2725 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i8:
2726 ; CHECK: # %bb.0: # %entry
2727 ; CHECK-NEXT: vmv1r.v v7, v8
2728 ; CHECK-NEXT: vmv1r.v v9, v8
2729 ; CHECK-NEXT: vmv1r.v v10, v8
2730 ; CHECK-NEXT: vmv1r.v v11, v8
2731 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
2732 ; CHECK-NEXT: vlseg5e8ff.v v7, (a0), v0.t
2733 ; CHECK-NEXT: csrr a0, vl
2734 ; CHECK-NEXT: sd a0, 0(a2)
2737 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg5ff.mask.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2738 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2739 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 5
2740 store i64 %2, ptr %outvl
2741 ret <vscale x 2 x i8> %1
2744 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg6ff.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr , i64)
2745 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg6ff of nxv2i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2747 define <vscale x 2 x i8> @test_vlseg6ff_nxv2i8(ptr %base, i64 %vl, ptr %outvl) {
2748 ; CHECK-LABEL: test_vlseg6ff_nxv2i8:
2749 ; CHECK: # %bb.0: # %entry
2750 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
2751 ; CHECK-NEXT: vlseg6e8ff.v v7, (a0)
2752 ; CHECK-NEXT: csrr a0, vl
2753 ; CHECK-NEXT: sd a0, 0(a2)
2756 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg6ff.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, ptr %base, i64 %vl)
2757 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2758 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 6
2759 store i64 %2, ptr %outvl
2760 ret <vscale x 2 x i8> %1
; Masked vlseg6ff of nxv2i8: %val tied into all 6 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2763 define <vscale x 2 x i8> @test_vlseg6ff_mask_nxv2i8(<vscale x 2 x i8> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2764 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i8:
2765 ; CHECK: # %bb.0: # %entry
2766 ; CHECK-NEXT: vmv1r.v v7, v8
2767 ; CHECK-NEXT: vmv1r.v v9, v8
2768 ; CHECK-NEXT: vmv1r.v v10, v8
2769 ; CHECK-NEXT: vmv1r.v v11, v8
2770 ; CHECK-NEXT: vmv1r.v v12, v8
2771 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
2772 ; CHECK-NEXT: vlseg6e8ff.v v7, (a0), v0.t
2773 ; CHECK-NEXT: csrr a0, vl
2774 ; CHECK-NEXT: sd a0, 0(a2)
2777 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg6ff.mask.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2778 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2779 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 6
2780 store i64 %2, ptr %outvl
2781 ret <vscale x 2 x i8> %1
2784 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg7ff.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr , i64)
2785 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg7ff of nxv2i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2787 define <vscale x 2 x i8> @test_vlseg7ff_nxv2i8(ptr %base, i64 %vl, ptr %outvl) {
2788 ; CHECK-LABEL: test_vlseg7ff_nxv2i8:
2789 ; CHECK: # %bb.0: # %entry
2790 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
2791 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0)
2792 ; CHECK-NEXT: csrr a0, vl
2793 ; CHECK-NEXT: sd a0, 0(a2)
2796 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg7ff.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, ptr %base, i64 %vl)
2797 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2798 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 7
2799 store i64 %2, ptr %outvl
2800 ret <vscale x 2 x i8> %1
; Masked vlseg7ff of nxv2i8: %val tied into all 7 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2803 define <vscale x 2 x i8> @test_vlseg7ff_mask_nxv2i8(<vscale x 2 x i8> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2804 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i8:
2805 ; CHECK: # %bb.0: # %entry
2806 ; CHECK-NEXT: vmv1r.v v7, v8
2807 ; CHECK-NEXT: vmv1r.v v9, v8
2808 ; CHECK-NEXT: vmv1r.v v10, v8
2809 ; CHECK-NEXT: vmv1r.v v11, v8
2810 ; CHECK-NEXT: vmv1r.v v12, v8
2811 ; CHECK-NEXT: vmv1r.v v13, v8
2812 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
2813 ; CHECK-NEXT: vlseg7e8ff.v v7, (a0), v0.t
2814 ; CHECK-NEXT: csrr a0, vl
2815 ; CHECK-NEXT: sd a0, 0(a2)
2818 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg7ff.mask.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2819 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2820 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 7
2821 store i64 %2, ptr %outvl
2822 ret <vscale x 2 x i8> %1
2825 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg8ff.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr , i64)
2826 declare {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg8ff of nxv2i8: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2828 define <vscale x 2 x i8> @test_vlseg8ff_nxv2i8(ptr %base, i64 %vl, ptr %outvl) {
2829 ; CHECK-LABEL: test_vlseg8ff_nxv2i8:
2830 ; CHECK: # %bb.0: # %entry
2831 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
2832 ; CHECK-NEXT: vlseg8e8ff.v v7, (a0)
2833 ; CHECK-NEXT: csrr a0, vl
2834 ; CHECK-NEXT: sd a0, 0(a2)
2837 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg8ff.nxv2i8(<vscale x 2 x i8> undef, <vscale x 2 x i8> undef ,<vscale x 2 x i8> undef ,<vscale x 2 x i8> undef, <vscale x 2 x i8> undef ,<vscale x 2 x i8> undef, <vscale x 2 x i8> undef, <vscale x 2 x i8> undef, ptr %base, i64 %vl)
2838 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2839 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 8
2840 store i64 %2, ptr %outvl
2841 ret <vscale x 2 x i8> %1
; Masked vlseg8ff of nxv2i8: %val tied into all 8 destination fields (vmv1r copies), policy operand i64 1; trimmed vl stored to %outvl.
2844 define <vscale x 2 x i8> @test_vlseg8ff_mask_nxv2i8(<vscale x 2 x i8> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2845 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i8:
2846 ; CHECK: # %bb.0: # %entry
2847 ; CHECK-NEXT: vmv1r.v v7, v8
2848 ; CHECK-NEXT: vmv1r.v v9, v8
2849 ; CHECK-NEXT: vmv1r.v v10, v8
2850 ; CHECK-NEXT: vmv1r.v v11, v8
2851 ; CHECK-NEXT: vmv1r.v v12, v8
2852 ; CHECK-NEXT: vmv1r.v v13, v8
2853 ; CHECK-NEXT: vmv1r.v v14, v8
2854 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, mu
2855 ; CHECK-NEXT: vlseg8e8ff.v v7, (a0), v0.t
2856 ; CHECK-NEXT: csrr a0, vl
2857 ; CHECK-NEXT: sd a0, 0(a2)
2860 %0 = tail call {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} @llvm.riscv.vlseg8ff.mask.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2861 %1 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 1
2862 %2 = extractvalue {<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, i64} %0, 8
2863 store i64 %2, ptr %outvl
2864 ret <vscale x 2 x i8> %1
2867 declare {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} @llvm.riscv.vlseg2ff.nxv8i32(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr , i64)
2868 declare {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv8i32(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr, <vscale x 8 x i1>, i64, i64)
; Unmasked vlseg2ff of nxv8i32 (LMUL=4, so the pair occupies v4/v8): undef passthrus; returns field 1, trimmed vl stored to %outvl.
2870 define <vscale x 8 x i32> @test_vlseg2ff_nxv8i32(ptr %base, i64 %vl, ptr %outvl) {
2871 ; CHECK-LABEL: test_vlseg2ff_nxv8i32:
2872 ; CHECK: # %bb.0: # %entry
2873 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
2874 ; CHECK-NEXT: vlseg2e32ff.v v4, (a0)
2875 ; CHECK-NEXT: csrr a0, vl
2876 ; CHECK-NEXT: sd a0, 0(a2)
2879 %0 = tail call {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} @llvm.riscv.vlseg2ff.nxv8i32(<vscale x 8 x i32> undef, <vscale x 8 x i32> undef, ptr %base, i64 %vl)
2880 %1 = extractvalue {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} %0, 1
2881 %2 = extractvalue {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} %0, 2
2882 store i64 %2, ptr %outvl
2883 ret <vscale x 8 x i32> %1
; Masked vlseg2ff of nxv8i32: %val tied into both LMUL=4 destination fields (vmv4r copy), policy operand i64 1; trimmed vl stored to %outvl.
2886 define <vscale x 8 x i32> @test_vlseg2ff_mask_nxv8i32(<vscale x 8 x i32> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
2887 ; CHECK-LABEL: test_vlseg2ff_mask_nxv8i32:
2888 ; CHECK: # %bb.0: # %entry
2889 ; CHECK-NEXT: vmv4r.v v4, v8
2890 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
2891 ; CHECK-NEXT: vlseg2e32ff.v v4, (a0), v0.t
2892 ; CHECK-NEXT: csrr a0, vl
2893 ; CHECK-NEXT: sd a0, 0(a2)
2896 %0 = tail call {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} @llvm.riscv.vlseg2ff.mask.nxv8i32(<vscale x 8 x i32> %val,<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
2897 %1 = extractvalue {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} %0, 1
2898 %2 = extractvalue {<vscale x 8 x i32>,<vscale x 8 x i32>, i64} %0, 2
2899 store i64 %2, ptr %outvl
2900 ret <vscale x 8 x i32> %1
2903 declare {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} @llvm.riscv.vlseg2ff.nxv32i8(<vscale x 32 x i8>,<vscale x 32 x i8>, ptr , i64)
2904 declare {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv32i8(<vscale x 32 x i8>,<vscale x 32 x i8>, ptr, <vscale x 32 x i1>, i64, i64)
; Unmasked vlseg2ff of nxv32i8 (LMUL=4): undef passthrus; returns field 1, trimmed vl stored to %outvl.
2906 define <vscale x 32 x i8> @test_vlseg2ff_nxv32i8(ptr %base, i64 %vl, ptr %outvl) {
2907 ; CHECK-LABEL: test_vlseg2ff_nxv32i8:
2908 ; CHECK: # %bb.0: # %entry
2909 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, ma
2910 ; CHECK-NEXT: vlseg2e8ff.v v4, (a0)
2911 ; CHECK-NEXT: csrr a0, vl
2912 ; CHECK-NEXT: sd a0, 0(a2)
2915 %0 = tail call {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} @llvm.riscv.vlseg2ff.nxv32i8(<vscale x 32 x i8> undef, <vscale x 32 x i8> undef, ptr %base, i64 %vl)
2916 %1 = extractvalue {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} %0, 1
2917 %2 = extractvalue {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} %0, 2
2918 store i64 %2, ptr %outvl
2919 ret <vscale x 32 x i8> %1
; Masked vlseg2ff of nxv32i8: %val tied into both LMUL=4 destination fields (vmv4r copy), policy operand i64 1; trimmed vl stored to %outvl.
2922 define <vscale x 32 x i8> @test_vlseg2ff_mask_nxv32i8(<vscale x 32 x i8> %val, ptr %base, i64 %vl, <vscale x 32 x i1> %mask, ptr %outvl) {
2923 ; CHECK-LABEL: test_vlseg2ff_mask_nxv32i8:
2924 ; CHECK: # %bb.0: # %entry
2925 ; CHECK-NEXT: vmv4r.v v4, v8
2926 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, mu
2927 ; CHECK-NEXT: vlseg2e8ff.v v4, (a0), v0.t
2928 ; CHECK-NEXT: csrr a0, vl
2929 ; CHECK-NEXT: sd a0, 0(a2)
2932 %0 = tail call {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} @llvm.riscv.vlseg2ff.mask.nxv32i8(<vscale x 32 x i8> %val,<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i1> %mask, i64 %vl, i64 1)
2933 %1 = extractvalue {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} %0, 1
2934 %2 = extractvalue {<vscale x 32 x i8>,<vscale x 32 x i8>, i64} %0, 2
2935 store i64 %2, ptr %outvl
2936 ret <vscale x 32 x i8> %1
2939 declare {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg2ff.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr , i64)
2940 declare {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg2ff of nxv2i16: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2942 define <vscale x 2 x i16> @test_vlseg2ff_nxv2i16(ptr %base, i64 %vl, ptr %outvl) {
2943 ; CHECK-LABEL: test_vlseg2ff_nxv2i16:
2944 ; CHECK: # %bb.0: # %entry
2945 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
2946 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
2947 ; CHECK-NEXT: csrr a0, vl
2948 ; CHECK-NEXT: sd a0, 0(a2)
2951 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg2ff.nxv2i16(<vscale x 2 x i16> undef, <vscale x 2 x i16> undef, ptr %base, i64 %vl)
2952 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
2953 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 2
2954 store i64 %2, ptr %outvl
2955 ret <vscale x 2 x i16> %1
; Masked vlseg2ff of nxv2i16: %val tied into both destination fields (vmv1r copy), policy operand i64 1; trimmed vl stored to %outvl.
2958 define <vscale x 2 x i16> @test_vlseg2ff_mask_nxv2i16(<vscale x 2 x i16> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2959 ; CHECK-LABEL: test_vlseg2ff_mask_nxv2i16:
2960 ; CHECK: # %bb.0: # %entry
2961 ; CHECK-NEXT: vmv1r.v v7, v8
2962 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
2963 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
2964 ; CHECK-NEXT: csrr a0, vl
2965 ; CHECK-NEXT: sd a0, 0(a2)
2968 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
2969 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
2970 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 2
2971 store i64 %2, ptr %outvl
2972 ret <vscale x 2 x i16> %1
2975 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg3ff.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr , i64)
2976 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg3ff of nxv2i16: undef passthrus; returns segment field 1 and stores the trimmed vl (csrr vl) to %outvl.
2978 define <vscale x 2 x i16> @test_vlseg3ff_nxv2i16(ptr %base, i64 %vl, ptr %outvl) {
2979 ; CHECK-LABEL: test_vlseg3ff_nxv2i16:
2980 ; CHECK: # %bb.0: # %entry
2981 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
2982 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
2983 ; CHECK-NEXT: csrr a0, vl
2984 ; CHECK-NEXT: sd a0, 0(a2)
2987 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg3ff.nxv2i16(<vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, ptr %base, i64 %vl)
2988 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
2989 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 3
2990 store i64 %2, ptr %outvl
2991 ret <vscale x 2 x i16> %1
; Masked vlseg3ff: %val replicated into all three passthru operands (vmv1r copies into v7/v9 around the returned v8).
2994 define <vscale x 2 x i16> @test_vlseg3ff_mask_nxv2i16(<vscale x 2 x i16> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
2995 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i16:
2996 ; CHECK: # %bb.0: # %entry
2997 ; CHECK-NEXT: vmv1r.v v7, v8
2998 ; CHECK-NEXT: vmv1r.v v9, v8
2999 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
3000 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
3001 ; CHECK-NEXT: csrr a0, vl
3002 ; CHECK-NEXT: sd a0, 0(a2)
3005 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3006 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3007 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 3
3008 store i64 %2, ptr %outvl
3009 ret <vscale x 2 x i16> %1
3012 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg4ff.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr , i64)
3013 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg4ff (e16/mf2), undef passthru; segment 1 returned, vl result (field 4) stored to %outvl.
3015 define <vscale x 2 x i16> @test_vlseg4ff_nxv2i16(ptr %base, i64 %vl, ptr %outvl) {
3016 ; CHECK-LABEL: test_vlseg4ff_nxv2i16:
3017 ; CHECK: # %bb.0: # %entry
3018 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
3019 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
3020 ; CHECK-NEXT: csrr a0, vl
3021 ; CHECK-NEXT: sd a0, 0(a2)
3024 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg4ff.nxv2i16(<vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, ptr %base, i64 %vl)
3025 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3026 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 4
3027 store i64 %2, ptr %outvl
3028 ret <vscale x 2 x i16> %1
; Masked vlseg4ff: three vmv1r copies materialize the replicated %val passthru tuple (v7..v10) before the masked ff load.
3031 define <vscale x 2 x i16> @test_vlseg4ff_mask_nxv2i16(<vscale x 2 x i16> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3032 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i16:
3033 ; CHECK: # %bb.0: # %entry
3034 ; CHECK-NEXT: vmv1r.v v7, v8
3035 ; CHECK-NEXT: vmv1r.v v9, v8
3036 ; CHECK-NEXT: vmv1r.v v10, v8
3037 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
3038 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
3039 ; CHECK-NEXT: csrr a0, vl
3040 ; CHECK-NEXT: sd a0, 0(a2)
3043 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3044 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3045 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 4
3046 store i64 %2, ptr %outvl
3047 ret <vscale x 2 x i16> %1
3050 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg5ff.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr , i64)
3051 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg5ff.mask.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg5ff (e16/mf2), undef passthru; segment 1 returned, vl result (field 5) stored to %outvl.
3053 define <vscale x 2 x i16> @test_vlseg5ff_nxv2i16(ptr %base, i64 %vl, ptr %outvl) {
3054 ; CHECK-LABEL: test_vlseg5ff_nxv2i16:
3055 ; CHECK: # %bb.0: # %entry
3056 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
3057 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
3058 ; CHECK-NEXT: csrr a0, vl
3059 ; CHECK-NEXT: sd a0, 0(a2)
3062 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg5ff.nxv2i16(<vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, ptr %base, i64 %vl)
3063 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3064 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 5
3065 store i64 %2, ptr %outvl
3066 ret <vscale x 2 x i16> %1
; Masked vlseg5ff: four vmv1r copies build the replicated %val passthru tuple (v7..v11) before the masked ff load.
3069 define <vscale x 2 x i16> @test_vlseg5ff_mask_nxv2i16(<vscale x 2 x i16> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3070 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2i16:
3071 ; CHECK: # %bb.0: # %entry
3072 ; CHECK-NEXT: vmv1r.v v7, v8
3073 ; CHECK-NEXT: vmv1r.v v9, v8
3074 ; CHECK-NEXT: vmv1r.v v10, v8
3075 ; CHECK-NEXT: vmv1r.v v11, v8
3076 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
3077 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
3078 ; CHECK-NEXT: csrr a0, vl
3079 ; CHECK-NEXT: sd a0, 0(a2)
3082 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg5ff.mask.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3083 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3084 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 5
3085 store i64 %2, ptr %outvl
3086 ret <vscale x 2 x i16> %1
3089 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg6ff.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr , i64)
3090 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg6ff.mask.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg6ff (e16/mf2), undef passthru; segment 1 returned, vl result (field 6) stored to %outvl.
3092 define <vscale x 2 x i16> @test_vlseg6ff_nxv2i16(ptr %base, i64 %vl, ptr %outvl) {
3093 ; CHECK-LABEL: test_vlseg6ff_nxv2i16:
3094 ; CHECK: # %bb.0: # %entry
3095 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
3096 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
3097 ; CHECK-NEXT: csrr a0, vl
3098 ; CHECK-NEXT: sd a0, 0(a2)
3101 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg6ff.nxv2i16(<vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, ptr %base, i64 %vl)
3102 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3103 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 6
3104 store i64 %2, ptr %outvl
3105 ret <vscale x 2 x i16> %1
; Masked vlseg6ff: five vmv1r copies build the replicated %val passthru tuple (v7..v12) before the masked ff load.
3108 define <vscale x 2 x i16> @test_vlseg6ff_mask_nxv2i16(<vscale x 2 x i16> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3109 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2i16:
3110 ; CHECK: # %bb.0: # %entry
3111 ; CHECK-NEXT: vmv1r.v v7, v8
3112 ; CHECK-NEXT: vmv1r.v v9, v8
3113 ; CHECK-NEXT: vmv1r.v v10, v8
3114 ; CHECK-NEXT: vmv1r.v v11, v8
3115 ; CHECK-NEXT: vmv1r.v v12, v8
3116 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
3117 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
3118 ; CHECK-NEXT: csrr a0, vl
3119 ; CHECK-NEXT: sd a0, 0(a2)
3122 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg6ff.mask.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3123 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3124 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 6
3125 store i64 %2, ptr %outvl
3126 ret <vscale x 2 x i16> %1
3129 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg7ff.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr , i64)
3130 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg7ff.mask.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg7ff (e16/mf2), undef passthru; segment 1 returned, vl result (field 7) stored to %outvl.
3132 define <vscale x 2 x i16> @test_vlseg7ff_nxv2i16(ptr %base, i64 %vl, ptr %outvl) {
3133 ; CHECK-LABEL: test_vlseg7ff_nxv2i16:
3134 ; CHECK: # %bb.0: # %entry
3135 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
3136 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
3137 ; CHECK-NEXT: csrr a0, vl
3138 ; CHECK-NEXT: sd a0, 0(a2)
3141 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg7ff.nxv2i16(<vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, ptr %base, i64 %vl)
3142 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3143 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 7
3144 store i64 %2, ptr %outvl
3145 ret <vscale x 2 x i16> %1
; Masked vlseg7ff: six vmv1r copies build the replicated %val passthru tuple (v7..v13) before the masked ff load.
3148 define <vscale x 2 x i16> @test_vlseg7ff_mask_nxv2i16(<vscale x 2 x i16> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3149 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2i16:
3150 ; CHECK: # %bb.0: # %entry
3151 ; CHECK-NEXT: vmv1r.v v7, v8
3152 ; CHECK-NEXT: vmv1r.v v9, v8
3153 ; CHECK-NEXT: vmv1r.v v10, v8
3154 ; CHECK-NEXT: vmv1r.v v11, v8
3155 ; CHECK-NEXT: vmv1r.v v12, v8
3156 ; CHECK-NEXT: vmv1r.v v13, v8
3157 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
3158 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
3159 ; CHECK-NEXT: csrr a0, vl
3160 ; CHECK-NEXT: sd a0, 0(a2)
3163 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg7ff.mask.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3164 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3165 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 7
3166 store i64 %2, ptr %outvl
3167 ret <vscale x 2 x i16> %1
3170 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg8ff.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr , i64)
3171 declare {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg8ff.mask.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg8ff (e16/mf2), undef passthru; segment 1 returned, vl result (field 8) stored to %outvl.
3173 define <vscale x 2 x i16> @test_vlseg8ff_nxv2i16(ptr %base, i64 %vl, ptr %outvl) {
3174 ; CHECK-LABEL: test_vlseg8ff_nxv2i16:
3175 ; CHECK: # %bb.0: # %entry
3176 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
3177 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
3178 ; CHECK-NEXT: csrr a0, vl
3179 ; CHECK-NEXT: sd a0, 0(a2)
3182 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg8ff.nxv2i16(<vscale x 2 x i16> undef, <vscale x 2 x i16> undef ,<vscale x 2 x i16> undef ,<vscale x 2 x i16> undef, <vscale x 2 x i16> undef ,<vscale x 2 x i16> undef, <vscale x 2 x i16> undef, <vscale x 2 x i16> undef, ptr %base, i64 %vl)
3183 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3184 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 8
3185 store i64 %2, ptr %outvl
3186 ret <vscale x 2 x i16> %1
; Masked vlseg8ff: seven vmv1r copies build the replicated %val passthru tuple (v7..v14) before the masked ff load.
3189 define <vscale x 2 x i16> @test_vlseg8ff_mask_nxv2i16(<vscale x 2 x i16> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3190 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2i16:
3191 ; CHECK: # %bb.0: # %entry
3192 ; CHECK-NEXT: vmv1r.v v7, v8
3193 ; CHECK-NEXT: vmv1r.v v9, v8
3194 ; CHECK-NEXT: vmv1r.v v10, v8
3195 ; CHECK-NEXT: vmv1r.v v11, v8
3196 ; CHECK-NEXT: vmv1r.v v12, v8
3197 ; CHECK-NEXT: vmv1r.v v13, v8
3198 ; CHECK-NEXT: vmv1r.v v14, v8
3199 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
3200 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
3201 ; CHECK-NEXT: csrr a0, vl
3202 ; CHECK-NEXT: sd a0, 0(a2)
3205 %0 = tail call {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} @llvm.riscv.vlseg8ff.mask.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3206 %1 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 1
3207 %2 = extractvalue {<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, i64} %0, 8
3208 store i64 %2, ptr %outvl
3209 ret <vscale x 2 x i16> %1
3212 declare {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg2ff.nxv2i64(<vscale x 2 x i64>,<vscale x 2 x i64>, ptr , i64)
3213 declare {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i64(<vscale x 2 x i64>,<vscale x 2 x i64>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg2ff at e64/m2 (LMUL=2, so result registers are v6/v8 pairs); undef passthru, vl result stored to %outvl.
3215 define <vscale x 2 x i64> @test_vlseg2ff_nxv2i64(ptr %base, i64 %vl, ptr %outvl) {
3216 ; CHECK-LABEL: test_vlseg2ff_nxv2i64:
3217 ; CHECK: # %bb.0: # %entry
3218 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
3219 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0)
3220 ; CHECK-NEXT: csrr a0, vl
3221 ; CHECK-NEXT: sd a0, 0(a2)
3224 %0 = tail call {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg2ff.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i64> undef, ptr %base, i64 %vl)
3225 %1 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 1
3226 %2 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 2
3227 store i64 %2, ptr %outvl
3228 ret <vscale x 2 x i64> %1
; Masked vlseg2ff at e64/m2: vmv2r copies the LMUL=2 passthru %val into v6 before the masked ff load.
3231 define <vscale x 2 x i64> @test_vlseg2ff_mask_nxv2i64(<vscale x 2 x i64> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3232 ; CHECK-LABEL: test_vlseg2ff_mask_nxv2i64:
3233 ; CHECK: # %bb.0: # %entry
3234 ; CHECK-NEXT: vmv2r.v v6, v8
3235 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
3236 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0), v0.t
3237 ; CHECK-NEXT: csrr a0, vl
3238 ; CHECK-NEXT: sd a0, 0(a2)
3241 %0 = tail call {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg2ff.mask.nxv2i64(<vscale x 2 x i64> %val,<vscale x 2 x i64> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3242 %1 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 1
3243 %2 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 2
3244 store i64 %2, ptr %outvl
3245 ret <vscale x 2 x i64> %1
3248 declare {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg3ff.nxv2i64(<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, ptr , i64)
3249 declare {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i64(<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg3ff at e64/m2, undef passthru; segment 1 returned, vl result (field 3) stored to %outvl.
3251 define <vscale x 2 x i64> @test_vlseg3ff_nxv2i64(ptr %base, i64 %vl, ptr %outvl) {
3252 ; CHECK-LABEL: test_vlseg3ff_nxv2i64:
3253 ; CHECK: # %bb.0: # %entry
3254 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
3255 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0)
3256 ; CHECK-NEXT: csrr a0, vl
3257 ; CHECK-NEXT: sd a0, 0(a2)
3260 %0 = tail call {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg3ff.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i64> undef, <vscale x 2 x i64> undef, ptr %base, i64 %vl)
3261 %1 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 1
3262 %2 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 3
3263 store i64 %2, ptr %outvl
3264 ret <vscale x 2 x i64> %1
; Masked vlseg3ff at e64/m2: two vmv2r copies build the replicated passthru tuple (v6/v8/v10) before the masked ff load.
3267 define <vscale x 2 x i64> @test_vlseg3ff_mask_nxv2i64(<vscale x 2 x i64> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3268 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2i64:
3269 ; CHECK: # %bb.0: # %entry
3270 ; CHECK-NEXT: vmv2r.v v6, v8
3271 ; CHECK-NEXT: vmv2r.v v10, v8
3272 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
3273 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0), v0.t
3274 ; CHECK-NEXT: csrr a0, vl
3275 ; CHECK-NEXT: sd a0, 0(a2)
3278 %0 = tail call {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg3ff.mask.nxv2i64(<vscale x 2 x i64> %val,<vscale x 2 x i64> %val,<vscale x 2 x i64> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3279 %1 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 1
3280 %2 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 3
3281 store i64 %2, ptr %outvl
3282 ret <vscale x 2 x i64> %1
3285 declare {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg4ff.nxv2i64(<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, ptr , i64)
3286 declare {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i64(<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked vlseg4ff at e64/m2, undef passthru; segment 1 returned, vl result (field 4) stored to %outvl.
3288 define <vscale x 2 x i64> @test_vlseg4ff_nxv2i64(ptr %base, i64 %vl, ptr %outvl) {
3289 ; CHECK-LABEL: test_vlseg4ff_nxv2i64:
3290 ; CHECK: # %bb.0: # %entry
3291 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
3292 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0)
3293 ; CHECK-NEXT: csrr a0, vl
3294 ; CHECK-NEXT: sd a0, 0(a2)
3297 %0 = tail call {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg4ff.nxv2i64(<vscale x 2 x i64> undef, <vscale x 2 x i64> undef, <vscale x 2 x i64> undef, <vscale x 2 x i64> undef, ptr %base, i64 %vl)
3298 %1 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 1
3299 %2 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 4
3300 store i64 %2, ptr %outvl
3301 ret <vscale x 2 x i64> %1
; Masked vlseg4ff at e64/m2: three vmv2r copies build the replicated passthru tuple (v6..v12) before the masked ff load.
3304 define <vscale x 2 x i64> @test_vlseg4ff_mask_nxv2i64(<vscale x 2 x i64> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3305 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2i64:
3306 ; CHECK: # %bb.0: # %entry
3307 ; CHECK-NEXT: vmv2r.v v6, v8
3308 ; CHECK-NEXT: vmv2r.v v10, v8
3309 ; CHECK-NEXT: vmv2r.v v12, v8
3310 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
3311 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0), v0.t
3312 ; CHECK-NEXT: csrr a0, vl
3313 ; CHECK-NEXT: sd a0, 0(a2)
3316 %0 = tail call {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} @llvm.riscv.vlseg4ff.mask.nxv2i64(<vscale x 2 x i64> %val,<vscale x 2 x i64> %val,<vscale x 2 x i64> %val,<vscale x 2 x i64> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3317 %1 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 1
3318 %2 = extractvalue {<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>,<vscale x 2 x i64>, i64} %0, 4
3319 store i64 %2, ptr %outvl
3320 ret <vscale x 2 x i64> %1
3323 declare {<vscale x 16 x half>,<vscale x 16 x half>, i64} @llvm.riscv.vlseg2ff.nxv16f16(<vscale x 16 x half>,<vscale x 16 x half>, ptr , i64)
3324 declare {<vscale x 16 x half>,<vscale x 16 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv16f16(<vscale x 16 x half>,<vscale x 16 x half>, ptr, <vscale x 16 x i1>, i64, i64)
; Unmasked vlseg2ff for f16 at e16/m4 (LMUL=4, v4/v8 register groups); undef passthru, vl result stored to %outvl.
3326 define <vscale x 16 x half> @test_vlseg2ff_nxv16f16(ptr %base, i64 %vl, ptr %outvl) {
3327 ; CHECK-LABEL: test_vlseg2ff_nxv16f16:
3328 ; CHECK: # %bb.0: # %entry
3329 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
3330 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
3331 ; CHECK-NEXT: csrr a0, vl
3332 ; CHECK-NEXT: sd a0, 0(a2)
3335 %0 = tail call {<vscale x 16 x half>,<vscale x 16 x half>, i64} @llvm.riscv.vlseg2ff.nxv16f16(<vscale x 16 x half> undef, <vscale x 16 x half> undef, ptr %base, i64 %vl)
3336 %1 = extractvalue {<vscale x 16 x half>,<vscale x 16 x half>, i64} %0, 1
3337 %2 = extractvalue {<vscale x 16 x half>,<vscale x 16 x half>, i64} %0, 2
3338 store i64 %2, ptr %outvl
3339 ret <vscale x 16 x half> %1
; Masked vlseg2ff for f16 at e16/m4: vmv4r copies the LMUL=4 passthru %val into v4 before the masked ff load.
3342 define <vscale x 16 x half> @test_vlseg2ff_mask_nxv16f16(<vscale x 16 x half> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask, ptr %outvl) {
3343 ; CHECK-LABEL: test_vlseg2ff_mask_nxv16f16:
3344 ; CHECK: # %bb.0: # %entry
3345 ; CHECK-NEXT: vmv4r.v v4, v8
3346 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
3347 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
3348 ; CHECK-NEXT: csrr a0, vl
3349 ; CHECK-NEXT: sd a0, 0(a2)
3352 %0 = tail call {<vscale x 16 x half>,<vscale x 16 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv16f16(<vscale x 16 x half> %val,<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)
3353 %1 = extractvalue {<vscale x 16 x half>,<vscale x 16 x half>, i64} %0, 1
3354 %2 = extractvalue {<vscale x 16 x half>,<vscale x 16 x half>, i64} %0, 2
3355 store i64 %2, ptr %outvl
3356 ret <vscale x 16 x half> %1
3359 declare {<vscale x 4 x double>,<vscale x 4 x double>, i64} @llvm.riscv.vlseg2ff.nxv4f64(<vscale x 4 x double>,<vscale x 4 x double>, ptr , i64)
3360 declare {<vscale x 4 x double>,<vscale x 4 x double>, i64} @llvm.riscv.vlseg2ff.mask.nxv4f64(<vscale x 4 x double>,<vscale x 4 x double>, ptr, <vscale x 4 x i1>, i64, i64)
; Unmasked vlseg2ff for f64 at e64/m4, undef passthru; segment 1 returned, vl result stored to %outvl.
3362 define <vscale x 4 x double> @test_vlseg2ff_nxv4f64(ptr %base, i64 %vl, ptr %outvl) {
3363 ; CHECK-LABEL: test_vlseg2ff_nxv4f64:
3364 ; CHECK: # %bb.0: # %entry
3365 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
3366 ; CHECK-NEXT: vlseg2e64ff.v v4, (a0)
3367 ; CHECK-NEXT: csrr a0, vl
3368 ; CHECK-NEXT: sd a0, 0(a2)
3371 %0 = tail call {<vscale x 4 x double>,<vscale x 4 x double>, i64} @llvm.riscv.vlseg2ff.nxv4f64(<vscale x 4 x double> undef, <vscale x 4 x double> undef, ptr %base, i64 %vl)
3372 %1 = extractvalue {<vscale x 4 x double>,<vscale x 4 x double>, i64} %0, 1
3373 %2 = extractvalue {<vscale x 4 x double>,<vscale x 4 x double>, i64} %0, 2
3374 store i64 %2, ptr %outvl
3375 ret <vscale x 4 x double> %1
; Masked vlseg2ff for f64 at e64/m4: vmv4r copies the LMUL=4 passthru %val into v4 before the masked ff load.
3378 define <vscale x 4 x double> @test_vlseg2ff_mask_nxv4f64(<vscale x 4 x double> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
3379 ; CHECK-LABEL: test_vlseg2ff_mask_nxv4f64:
3380 ; CHECK: # %bb.0: # %entry
3381 ; CHECK-NEXT: vmv4r.v v4, v8
3382 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, mu
3383 ; CHECK-NEXT: vlseg2e64ff.v v4, (a0), v0.t
3384 ; CHECK-NEXT: csrr a0, vl
3385 ; CHECK-NEXT: sd a0, 0(a2)
3388 %0 = tail call {<vscale x 4 x double>,<vscale x 4 x double>, i64} @llvm.riscv.vlseg2ff.mask.nxv4f64(<vscale x 4 x double> %val,<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
3389 %1 = extractvalue {<vscale x 4 x double>,<vscale x 4 x double>, i64} %0, 1
3390 %2 = extractvalue {<vscale x 4 x double>,<vscale x 4 x double>, i64} %0, 2
3391 store i64 %2, ptr %outvl
3392 ret <vscale x 4 x double> %1
3395 declare {<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg2ff.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>, ptr , i64)
3396 declare {<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg2ff.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked vlseg2ff for f64 at e64/m1, undef passthru; segment 1 returned, vl result stored to %outvl.
3398 define <vscale x 1 x double> @test_vlseg2ff_nxv1f64(ptr %base, i64 %vl, ptr %outvl) {
3399 ; CHECK-LABEL: test_vlseg2ff_nxv1f64:
3400 ; CHECK: # %bb.0: # %entry
3401 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
3402 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0)
3403 ; CHECK-NEXT: csrr a0, vl
3404 ; CHECK-NEXT: sd a0, 0(a2)
3407 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg2ff.nxv1f64(<vscale x 1 x double> undef, <vscale x 1 x double> undef, ptr %base, i64 %vl)
3408 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3409 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 2
3410 store i64 %2, ptr %outvl
3411 ret <vscale x 1 x double> %1
; Masked vlseg2ff for f64 at e64/m1: vmv1r copies the passthru %val into v7 before the masked ff load.
3414 define <vscale x 1 x double> @test_vlseg2ff_mask_nxv1f64(<vscale x 1 x double> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3415 ; CHECK-LABEL: test_vlseg2ff_mask_nxv1f64:
3416 ; CHECK: # %bb.0: # %entry
3417 ; CHECK-NEXT: vmv1r.v v7, v8
3418 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
3419 ; CHECK-NEXT: vlseg2e64ff.v v7, (a0), v0.t
3420 ; CHECK-NEXT: csrr a0, vl
3421 ; CHECK-NEXT: sd a0, 0(a2)
3424 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg2ff.mask.nxv1f64(<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3425 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3426 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 2
3427 store i64 %2, ptr %outvl
3428 ret <vscale x 1 x double> %1
3431 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg3ff.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr , i64)
3432 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg3ff.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked vlseg3ff for f64 at e64/m1, undef passthru; segment 1 returned, vl result (field 3) stored to %outvl.
3434 define <vscale x 1 x double> @test_vlseg3ff_nxv1f64(ptr %base, i64 %vl, ptr %outvl) {
3435 ; CHECK-LABEL: test_vlseg3ff_nxv1f64:
3436 ; CHECK: # %bb.0: # %entry
3437 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
3438 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0)
3439 ; CHECK-NEXT: csrr a0, vl
3440 ; CHECK-NEXT: sd a0, 0(a2)
3443 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg3ff.nxv1f64(<vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, ptr %base, i64 %vl)
3444 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3445 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 3
3446 store i64 %2, ptr %outvl
3447 ret <vscale x 1 x double> %1
; Masked vlseg3ff for f64 at e64/m1: two vmv1r copies build the replicated passthru tuple (v7/v8/v9) before the masked ff load.
3450 define <vscale x 1 x double> @test_vlseg3ff_mask_nxv1f64(<vscale x 1 x double> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3451 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f64:
3452 ; CHECK: # %bb.0: # %entry
3453 ; CHECK-NEXT: vmv1r.v v7, v8
3454 ; CHECK-NEXT: vmv1r.v v9, v8
3455 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
3456 ; CHECK-NEXT: vlseg3e64ff.v v7, (a0), v0.t
3457 ; CHECK-NEXT: csrr a0, vl
3458 ; CHECK-NEXT: sd a0, 0(a2)
3461 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg3ff.mask.nxv1f64(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3462 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3463 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 3
3464 store i64 %2, ptr %outvl
3465 ret <vscale x 1 x double> %1
3468 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg4ff.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr , i64)
3469 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg4ff.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked vlseg4ff for f64 at e64/m1, undef passthru; segment 1 returned, vl result (field 4) stored to %outvl.
3471 define <vscale x 1 x double> @test_vlseg4ff_nxv1f64(ptr %base, i64 %vl, ptr %outvl) {
3472 ; CHECK-LABEL: test_vlseg4ff_nxv1f64:
3473 ; CHECK: # %bb.0: # %entry
3474 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
3475 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0)
3476 ; CHECK-NEXT: csrr a0, vl
3477 ; CHECK-NEXT: sd a0, 0(a2)
3480 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg4ff.nxv1f64(<vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, ptr %base, i64 %vl)
3481 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3482 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 4
3483 store i64 %2, ptr %outvl
3484 ret <vscale x 1 x double> %1
; Masked vlseg4ff for f64 at e64/m1: three vmv1r copies build the replicated passthru tuple (v7..v10) before the masked ff load.
3487 define <vscale x 1 x double> @test_vlseg4ff_mask_nxv1f64(<vscale x 1 x double> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3488 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f64:
3489 ; CHECK: # %bb.0: # %entry
3490 ; CHECK-NEXT: vmv1r.v v7, v8
3491 ; CHECK-NEXT: vmv1r.v v9, v8
3492 ; CHECK-NEXT: vmv1r.v v10, v8
3493 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
3494 ; CHECK-NEXT: vlseg4e64ff.v v7, (a0), v0.t
3495 ; CHECK-NEXT: csrr a0, vl
3496 ; CHECK-NEXT: sd a0, 0(a2)
3499 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg4ff.mask.nxv1f64(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3500 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3501 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 4
3502 store i64 %2, ptr %outvl
3503 ret <vscale x 1 x double> %1
3506 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg5ff.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr , i64)
3507 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg5ff.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i1>, i64, i64)
3509 define <vscale x 1 x double> @test_vlseg5ff_nxv1f64(ptr %base, i64 %vl, ptr %outvl) {
3510 ; CHECK-LABEL: test_vlseg5ff_nxv1f64:
3511 ; CHECK: # %bb.0: # %entry
3512 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
3513 ; CHECK-NEXT: vlseg5e64ff.v v7, (a0)
3514 ; CHECK-NEXT: csrr a0, vl
3515 ; CHECK-NEXT: sd a0, 0(a2)
3518 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg5ff.nxv1f64(<vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, ptr %base, i64 %vl)
3519 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3520 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 5
3521 store i64 %2, ptr %outvl
3522 ret <vscale x 1 x double> %1
3525 define <vscale x 1 x double> @test_vlseg5ff_mask_nxv1f64(<vscale x 1 x double> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3526 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f64:
3527 ; CHECK: # %bb.0: # %entry
3528 ; CHECK-NEXT: vmv1r.v v7, v8
3529 ; CHECK-NEXT: vmv1r.v v9, v8
3530 ; CHECK-NEXT: vmv1r.v v10, v8
3531 ; CHECK-NEXT: vmv1r.v v11, v8
3532 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
3533 ; CHECK-NEXT: vlseg5e64ff.v v7, (a0), v0.t
3534 ; CHECK-NEXT: csrr a0, vl
3535 ; CHECK-NEXT: sd a0, 0(a2)
3538 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg5ff.mask.nxv1f64(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3539 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3540 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 5
3541 store i64 %2, ptr %outvl
3542 ret <vscale x 1 x double> %1
3545 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg6ff.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr , i64)
3546 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg6ff.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i1>, i64, i64)
3548 define <vscale x 1 x double> @test_vlseg6ff_nxv1f64(ptr %base, i64 %vl, ptr %outvl) {
3549 ; CHECK-LABEL: test_vlseg6ff_nxv1f64:
3550 ; CHECK: # %bb.0: # %entry
3551 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
3552 ; CHECK-NEXT: vlseg6e64ff.v v7, (a0)
3553 ; CHECK-NEXT: csrr a0, vl
3554 ; CHECK-NEXT: sd a0, 0(a2)
3557 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg6ff.nxv1f64(<vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, ptr %base, i64 %vl)
3558 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3559 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 6
3560 store i64 %2, ptr %outvl
3561 ret <vscale x 1 x double> %1
3564 define <vscale x 1 x double> @test_vlseg6ff_mask_nxv1f64(<vscale x 1 x double> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3565 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f64:
3566 ; CHECK: # %bb.0: # %entry
3567 ; CHECK-NEXT: vmv1r.v v7, v8
3568 ; CHECK-NEXT: vmv1r.v v9, v8
3569 ; CHECK-NEXT: vmv1r.v v10, v8
3570 ; CHECK-NEXT: vmv1r.v v11, v8
3571 ; CHECK-NEXT: vmv1r.v v12, v8
3572 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
3573 ; CHECK-NEXT: vlseg6e64ff.v v7, (a0), v0.t
3574 ; CHECK-NEXT: csrr a0, vl
3575 ; CHECK-NEXT: sd a0, 0(a2)
3578 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg6ff.mask.nxv1f64(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3579 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3580 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 6
3581 store i64 %2, ptr %outvl
3582 ret <vscale x 1 x double> %1
3585 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg7ff.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr , i64)
3586 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg7ff.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i1>, i64, i64)
3588 define <vscale x 1 x double> @test_vlseg7ff_nxv1f64(ptr %base, i64 %vl, ptr %outvl) {
3589 ; CHECK-LABEL: test_vlseg7ff_nxv1f64:
3590 ; CHECK: # %bb.0: # %entry
3591 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
3592 ; CHECK-NEXT: vlseg7e64ff.v v7, (a0)
3593 ; CHECK-NEXT: csrr a0, vl
3594 ; CHECK-NEXT: sd a0, 0(a2)
3597 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg7ff.nxv1f64(<vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, ptr %base, i64 %vl)
3598 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3599 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 7
3600 store i64 %2, ptr %outvl
3601 ret <vscale x 1 x double> %1
3604 define <vscale x 1 x double> @test_vlseg7ff_mask_nxv1f64(<vscale x 1 x double> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3605 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f64:
3606 ; CHECK: # %bb.0: # %entry
3607 ; CHECK-NEXT: vmv1r.v v7, v8
3608 ; CHECK-NEXT: vmv1r.v v9, v8
3609 ; CHECK-NEXT: vmv1r.v v10, v8
3610 ; CHECK-NEXT: vmv1r.v v11, v8
3611 ; CHECK-NEXT: vmv1r.v v12, v8
3612 ; CHECK-NEXT: vmv1r.v v13, v8
3613 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
3614 ; CHECK-NEXT: vlseg7e64ff.v v7, (a0), v0.t
3615 ; CHECK-NEXT: csrr a0, vl
3616 ; CHECK-NEXT: sd a0, 0(a2)
3619 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg7ff.mask.nxv1f64(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3620 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3621 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 7
3622 store i64 %2, ptr %outvl
3623 ret <vscale x 1 x double> %1
3626 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg8ff.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr , i64)
3627 declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg8ff.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i1>, i64, i64)
3629 define <vscale x 1 x double> @test_vlseg8ff_nxv1f64(ptr %base, i64 %vl, ptr %outvl) {
3630 ; CHECK-LABEL: test_vlseg8ff_nxv1f64:
3631 ; CHECK: # %bb.0: # %entry
3632 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
3633 ; CHECK-NEXT: vlseg8e64ff.v v7, (a0)
3634 ; CHECK-NEXT: csrr a0, vl
3635 ; CHECK-NEXT: sd a0, 0(a2)
3638 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg8ff.nxv1f64(<vscale x 1 x double> undef, <vscale x 1 x double> undef ,<vscale x 1 x double> undef ,<vscale x 1 x double> undef, <vscale x 1 x double> undef ,<vscale x 1 x double> undef, <vscale x 1 x double> undef, <vscale x 1 x double> undef, ptr %base, i64 %vl)
3639 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3640 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 8
3641 store i64 %2, ptr %outvl
3642 ret <vscale x 1 x double> %1
3645 define <vscale x 1 x double> @test_vlseg8ff_mask_nxv1f64(<vscale x 1 x double> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3646 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f64:
3647 ; CHECK: # %bb.0: # %entry
3648 ; CHECK-NEXT: vmv1r.v v7, v8
3649 ; CHECK-NEXT: vmv1r.v v9, v8
3650 ; CHECK-NEXT: vmv1r.v v10, v8
3651 ; CHECK-NEXT: vmv1r.v v11, v8
3652 ; CHECK-NEXT: vmv1r.v v12, v8
3653 ; CHECK-NEXT: vmv1r.v v13, v8
3654 ; CHECK-NEXT: vmv1r.v v14, v8
3655 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, mu
3656 ; CHECK-NEXT: vlseg8e64ff.v v7, (a0), v0.t
3657 ; CHECK-NEXT: csrr a0, vl
3658 ; CHECK-NEXT: sd a0, 0(a2)
3661 %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} @llvm.riscv.vlseg8ff.mask.nxv1f64(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3662 %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 1
3663 %2 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, i64} %0, 8
3664 store i64 %2, ptr %outvl
3665 ret <vscale x 1 x double> %1
3668 declare {<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg2ff.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>, ptr , i64)
3669 declare {<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i1>, i64, i64)
3671 define <vscale x 2 x float> @test_vlseg2ff_nxv2f32(ptr %base, i64 %vl, ptr %outvl) {
3672 ; CHECK-LABEL: test_vlseg2ff_nxv2f32:
3673 ; CHECK: # %bb.0: # %entry
3674 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
3675 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0)
3676 ; CHECK-NEXT: csrr a0, vl
3677 ; CHECK-NEXT: sd a0, 0(a2)
3680 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg2ff.nxv2f32(<vscale x 2 x float> undef, <vscale x 2 x float> undef, ptr %base, i64 %vl)
3681 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3682 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 2
3683 store i64 %2, ptr %outvl
3684 ret <vscale x 2 x float> %1
3687 define <vscale x 2 x float> @test_vlseg2ff_mask_nxv2f32(<vscale x 2 x float> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3688 ; CHECK-LABEL: test_vlseg2ff_mask_nxv2f32:
3689 ; CHECK: # %bb.0: # %entry
3690 ; CHECK-NEXT: vmv1r.v v7, v8
3691 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
3692 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t
3693 ; CHECK-NEXT: csrr a0, vl
3694 ; CHECK-NEXT: sd a0, 0(a2)
3697 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv2f32(<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3698 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3699 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 2
3700 store i64 %2, ptr %outvl
3701 ret <vscale x 2 x float> %1
3704 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg3ff.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr , i64)
3705 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg3ff.mask.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i1>, i64, i64)
3707 define <vscale x 2 x float> @test_vlseg3ff_nxv2f32(ptr %base, i64 %vl, ptr %outvl) {
3708 ; CHECK-LABEL: test_vlseg3ff_nxv2f32:
3709 ; CHECK: # %bb.0: # %entry
3710 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
3711 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0)
3712 ; CHECK-NEXT: csrr a0, vl
3713 ; CHECK-NEXT: sd a0, 0(a2)
3716 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg3ff.nxv2f32(<vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, ptr %base, i64 %vl)
3717 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3718 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 3
3719 store i64 %2, ptr %outvl
3720 ret <vscale x 2 x float> %1
3723 define <vscale x 2 x float> @test_vlseg3ff_mask_nxv2f32(<vscale x 2 x float> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3724 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f32:
3725 ; CHECK: # %bb.0: # %entry
3726 ; CHECK-NEXT: vmv1r.v v7, v8
3727 ; CHECK-NEXT: vmv1r.v v9, v8
3728 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
3729 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t
3730 ; CHECK-NEXT: csrr a0, vl
3731 ; CHECK-NEXT: sd a0, 0(a2)
3734 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg3ff.mask.nxv2f32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3735 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3736 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 3
3737 store i64 %2, ptr %outvl
3738 ret <vscale x 2 x float> %1
3741 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg4ff.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr , i64)
3742 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg4ff.mask.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i1>, i64, i64)
3744 define <vscale x 2 x float> @test_vlseg4ff_nxv2f32(ptr %base, i64 %vl, ptr %outvl) {
3745 ; CHECK-LABEL: test_vlseg4ff_nxv2f32:
3746 ; CHECK: # %bb.0: # %entry
3747 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
3748 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0)
3749 ; CHECK-NEXT: csrr a0, vl
3750 ; CHECK-NEXT: sd a0, 0(a2)
3753 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg4ff.nxv2f32(<vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, ptr %base, i64 %vl)
3754 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3755 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 4
3756 store i64 %2, ptr %outvl
3757 ret <vscale x 2 x float> %1
3760 define <vscale x 2 x float> @test_vlseg4ff_mask_nxv2f32(<vscale x 2 x float> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3761 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f32:
3762 ; CHECK: # %bb.0: # %entry
3763 ; CHECK-NEXT: vmv1r.v v7, v8
3764 ; CHECK-NEXT: vmv1r.v v9, v8
3765 ; CHECK-NEXT: vmv1r.v v10, v8
3766 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
3767 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t
3768 ; CHECK-NEXT: csrr a0, vl
3769 ; CHECK-NEXT: sd a0, 0(a2)
3772 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg4ff.mask.nxv2f32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3773 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3774 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 4
3775 store i64 %2, ptr %outvl
3776 ret <vscale x 2 x float> %1
3779 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg5ff.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr , i64)
3780 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg5ff.mask.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i1>, i64, i64)
3782 define <vscale x 2 x float> @test_vlseg5ff_nxv2f32(ptr %base, i64 %vl, ptr %outvl) {
3783 ; CHECK-LABEL: test_vlseg5ff_nxv2f32:
3784 ; CHECK: # %bb.0: # %entry
3785 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
3786 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0)
3787 ; CHECK-NEXT: csrr a0, vl
3788 ; CHECK-NEXT: sd a0, 0(a2)
3791 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg5ff.nxv2f32(<vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, ptr %base, i64 %vl)
3792 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3793 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 5
3794 store i64 %2, ptr %outvl
3795 ret <vscale x 2 x float> %1
3798 define <vscale x 2 x float> @test_vlseg5ff_mask_nxv2f32(<vscale x 2 x float> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3799 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2f32:
3800 ; CHECK: # %bb.0: # %entry
3801 ; CHECK-NEXT: vmv1r.v v7, v8
3802 ; CHECK-NEXT: vmv1r.v v9, v8
3803 ; CHECK-NEXT: vmv1r.v v10, v8
3804 ; CHECK-NEXT: vmv1r.v v11, v8
3805 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
3806 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t
3807 ; CHECK-NEXT: csrr a0, vl
3808 ; CHECK-NEXT: sd a0, 0(a2)
3811 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg5ff.mask.nxv2f32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3812 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3813 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 5
3814 store i64 %2, ptr %outvl
3815 ret <vscale x 2 x float> %1
3818 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg6ff.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr , i64)
3819 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg6ff.mask.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i1>, i64, i64)
3821 define <vscale x 2 x float> @test_vlseg6ff_nxv2f32(ptr %base, i64 %vl, ptr %outvl) {
3822 ; CHECK-LABEL: test_vlseg6ff_nxv2f32:
3823 ; CHECK: # %bb.0: # %entry
3824 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
3825 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0)
3826 ; CHECK-NEXT: csrr a0, vl
3827 ; CHECK-NEXT: sd a0, 0(a2)
3830 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg6ff.nxv2f32(<vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, ptr %base, i64 %vl)
3831 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3832 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 6
3833 store i64 %2, ptr %outvl
3834 ret <vscale x 2 x float> %1
3837 define <vscale x 2 x float> @test_vlseg6ff_mask_nxv2f32(<vscale x 2 x float> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3838 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2f32:
3839 ; CHECK: # %bb.0: # %entry
3840 ; CHECK-NEXT: vmv1r.v v7, v8
3841 ; CHECK-NEXT: vmv1r.v v9, v8
3842 ; CHECK-NEXT: vmv1r.v v10, v8
3843 ; CHECK-NEXT: vmv1r.v v11, v8
3844 ; CHECK-NEXT: vmv1r.v v12, v8
3845 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
3846 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t
3847 ; CHECK-NEXT: csrr a0, vl
3848 ; CHECK-NEXT: sd a0, 0(a2)
3851 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg6ff.mask.nxv2f32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3852 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3853 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 6
3854 store i64 %2, ptr %outvl
3855 ret <vscale x 2 x float> %1
3858 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg7ff.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr , i64)
3859 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg7ff.mask.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i1>, i64, i64)
3861 define <vscale x 2 x float> @test_vlseg7ff_nxv2f32(ptr %base, i64 %vl, ptr %outvl) {
3862 ; CHECK-LABEL: test_vlseg7ff_nxv2f32:
3863 ; CHECK: # %bb.0: # %entry
3864 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
3865 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0)
3866 ; CHECK-NEXT: csrr a0, vl
3867 ; CHECK-NEXT: sd a0, 0(a2)
3870 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg7ff.nxv2f32(<vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, ptr %base, i64 %vl)
3871 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3872 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 7
3873 store i64 %2, ptr %outvl
3874 ret <vscale x 2 x float> %1
3877 define <vscale x 2 x float> @test_vlseg7ff_mask_nxv2f32(<vscale x 2 x float> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3878 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2f32:
3879 ; CHECK: # %bb.0: # %entry
3880 ; CHECK-NEXT: vmv1r.v v7, v8
3881 ; CHECK-NEXT: vmv1r.v v9, v8
3882 ; CHECK-NEXT: vmv1r.v v10, v8
3883 ; CHECK-NEXT: vmv1r.v v11, v8
3884 ; CHECK-NEXT: vmv1r.v v12, v8
3885 ; CHECK-NEXT: vmv1r.v v13, v8
3886 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
3887 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t
3888 ; CHECK-NEXT: csrr a0, vl
3889 ; CHECK-NEXT: sd a0, 0(a2)
3892 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg7ff.mask.nxv2f32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3893 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3894 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 7
3895 store i64 %2, ptr %outvl
3896 ret <vscale x 2 x float> %1
; vlseg8ff: 8-field unit-stride fault-only-first segment load of <vscale x 2 x float>.
; The intrinsic returns the 8 loaded fields plus the post-fault vector length (i64).
3899 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg8ff.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr , i64)
3900 declare {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg8ff.mask.nxv2f32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i1>, i64, i64)
; Unmasked case with undef merge operands: expect a single vlseg8e32ff.v, then the
; updated vl is read back via "csrr a0, vl" and stored through %outvl.
3902 define <vscale x 2 x float> @test_vlseg8ff_nxv2f32(ptr %base, i64 %vl, ptr %outvl) {
3903 ; CHECK-LABEL: test_vlseg8ff_nxv2f32:
3904 ; CHECK: # %bb.0: # %entry
3905 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
3906 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0)
3907 ; CHECK-NEXT: csrr a0, vl
3908 ; CHECK-NEXT: sd a0, 0(a2)
3911 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg8ff.nxv2f32(<vscale x 2 x float> undef, <vscale x 2 x float> undef ,<vscale x 2 x float> undef ,<vscale x 2 x float> undef, <vscale x 2 x float> undef ,<vscale x 2 x float> undef, <vscale x 2 x float> undef, <vscale x 2 x float> undef, ptr %base, i64 %vl)
3912 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3913 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 8
3914 store i64 %2, ptr %outvl
3915 ret <vscale x 2 x float> %1
; Masked case: %val (in v8) is passed as the merge value for all 8 fields, so the
; vmv1r.v copies replicate it into the other registers of the destination segment
; group (v7, v9-v14), and the vsetvli selects the mask-undisturbed ("mu") policy.
3918 define <vscale x 2 x float> @test_vlseg8ff_mask_nxv2f32(<vscale x 2 x float> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
3919 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2f32:
3920 ; CHECK: # %bb.0: # %entry
3921 ; CHECK-NEXT: vmv1r.v v7, v8
3922 ; CHECK-NEXT: vmv1r.v v9, v8
3923 ; CHECK-NEXT: vmv1r.v v10, v8
3924 ; CHECK-NEXT: vmv1r.v v11, v8
3925 ; CHECK-NEXT: vmv1r.v v12, v8
3926 ; CHECK-NEXT: vmv1r.v v13, v8
3927 ; CHECK-NEXT: vmv1r.v v14, v8
3928 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, mu
3929 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t
3930 ; CHECK-NEXT: csrr a0, vl
3931 ; CHECK-NEXT: sd a0, 0(a2)
3934 %0 = tail call {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} @llvm.riscv.vlseg8ff.mask.nxv2f32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
3935 %1 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 1
3936 %2 = extractvalue {<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, i64} %0, 8
3937 store i64 %2, ptr %outvl
3938 ret <vscale x 2 x float> %1
; vlseg2ff: 2-field fault-only-first segment load of <vscale x 1 x half>.
; Returns both fields plus the post-fault vector length (i64).
3941 declare {<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg2ff.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>, ptr , i64)
3942 declare {<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg2e16ff.v at e16/mf4, then the updated vl is read via
; "csrr a0, vl" and stored through %outvl.
3944 define <vscale x 1 x half> @test_vlseg2ff_nxv1f16(ptr %base, i64 %vl, ptr %outvl) {
3945 ; CHECK-LABEL: test_vlseg2ff_nxv1f16:
3946 ; CHECK: # %bb.0: # %entry
3947 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
3948 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
3949 ; CHECK-NEXT: csrr a0, vl
3950 ; CHECK-NEXT: sd a0, 0(a2)
3953 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg2ff.nxv1f16(<vscale x 1 x half> undef, <vscale x 1 x half> undef, ptr %base, i64 %vl)
3954 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
3955 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 2
3956 store i64 %2, ptr %outvl
3957 ret <vscale x 1 x half> %1
; Masked: %val (v8) is the merge value for both fields, so it is copied into v7
; (the other destination register) and the vsetvli uses the "mu" policy.
3960 define <vscale x 1 x half> @test_vlseg2ff_mask_nxv1f16(<vscale x 1 x half> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3961 ; CHECK-LABEL: test_vlseg2ff_mask_nxv1f16:
3962 ; CHECK: # %bb.0: # %entry
3963 ; CHECK-NEXT: vmv1r.v v7, v8
3964 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
3965 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
3966 ; CHECK-NEXT: csrr a0, vl
3967 ; CHECK-NEXT: sd a0, 0(a2)
3970 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv1f16(<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
3971 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
3972 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 2
3973 store i64 %2, ptr %outvl
3974 ret <vscale x 1 x half> %1
; vlseg3ff: 3-field fault-only-first segment load of <vscale x 1 x half>.
; Returns the 3 fields plus the post-fault vector length (i64).
3977 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg3ff.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr , i64)
3978 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg3e16ff.v at e16/mf4, then csrr of vl stored to %outvl.
3980 define <vscale x 1 x half> @test_vlseg3ff_nxv1f16(ptr %base, i64 %vl, ptr %outvl) {
3981 ; CHECK-LABEL: test_vlseg3ff_nxv1f16:
3982 ; CHECK: # %bb.0: # %entry
3983 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
3984 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
3985 ; CHECK-NEXT: csrr a0, vl
3986 ; CHECK-NEXT: sd a0, 0(a2)
3989 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg3ff.nxv1f16(<vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, ptr %base, i64 %vl)
3990 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
3991 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 3
3992 store i64 %2, ptr %outvl
3993 ret <vscale x 1 x half> %1
; Masked: %val (v8) merges all 3 fields, so it is copied into the other
; destination registers (v7, v9); vsetvli uses the "mu" policy.
3996 define <vscale x 1 x half> @test_vlseg3ff_mask_nxv1f16(<vscale x 1 x half> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
3997 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f16:
3998 ; CHECK: # %bb.0: # %entry
3999 ; CHECK-NEXT: vmv1r.v v7, v8
4000 ; CHECK-NEXT: vmv1r.v v9, v8
4001 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
4002 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
4003 ; CHECK-NEXT: csrr a0, vl
4004 ; CHECK-NEXT: sd a0, 0(a2)
4007 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv1f16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4008 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4009 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 3
4010 store i64 %2, ptr %outvl
4011 ret <vscale x 1 x half> %1
; vlseg4ff: 4-field fault-only-first segment load of <vscale x 1 x half>.
; Returns the 4 fields plus the post-fault vector length (i64).
4014 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg4ff.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr , i64)
4015 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg4e16ff.v at e16/mf4, then csrr of vl stored to %outvl.
4017 define <vscale x 1 x half> @test_vlseg4ff_nxv1f16(ptr %base, i64 %vl, ptr %outvl) {
4018 ; CHECK-LABEL: test_vlseg4ff_nxv1f16:
4019 ; CHECK: # %bb.0: # %entry
4020 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
4021 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
4022 ; CHECK-NEXT: csrr a0, vl
4023 ; CHECK-NEXT: sd a0, 0(a2)
4026 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg4ff.nxv1f16(<vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, ptr %base, i64 %vl)
4027 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4028 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 4
4029 store i64 %2, ptr %outvl
4030 ret <vscale x 1 x half> %1
; Masked: %val (v8) merges all 4 fields, so it is copied into the other
; destination registers (v7, v9, v10); vsetvli uses the "mu" policy.
4033 define <vscale x 1 x half> @test_vlseg4ff_mask_nxv1f16(<vscale x 1 x half> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4034 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f16:
4035 ; CHECK: # %bb.0: # %entry
4036 ; CHECK-NEXT: vmv1r.v v7, v8
4037 ; CHECK-NEXT: vmv1r.v v9, v8
4038 ; CHECK-NEXT: vmv1r.v v10, v8
4039 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
4040 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
4041 ; CHECK-NEXT: csrr a0, vl
4042 ; CHECK-NEXT: sd a0, 0(a2)
4045 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv1f16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4046 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4047 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 4
4048 store i64 %2, ptr %outvl
4049 ret <vscale x 1 x half> %1
; vlseg5ff: 5-field fault-only-first segment load of <vscale x 1 x half>.
; Returns the 5 fields plus the post-fault vector length (i64).
4052 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg5ff.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr , i64)
4053 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg5ff.mask.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg5e16ff.v at e16/mf4, then csrr of vl stored to %outvl.
4055 define <vscale x 1 x half> @test_vlseg5ff_nxv1f16(ptr %base, i64 %vl, ptr %outvl) {
4056 ; CHECK-LABEL: test_vlseg5ff_nxv1f16:
4057 ; CHECK: # %bb.0: # %entry
4058 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
4059 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
4060 ; CHECK-NEXT: csrr a0, vl
4061 ; CHECK-NEXT: sd a0, 0(a2)
4064 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg5ff.nxv1f16(<vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, ptr %base, i64 %vl)
4065 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4066 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 5
4067 store i64 %2, ptr %outvl
4068 ret <vscale x 1 x half> %1
; Masked: %val (v8) merges all 5 fields, so it is copied into the other
; destination registers (v7, v9-v11); vsetvli uses the "mu" policy.
4071 define <vscale x 1 x half> @test_vlseg5ff_mask_nxv1f16(<vscale x 1 x half> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4072 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f16:
4073 ; CHECK: # %bb.0: # %entry
4074 ; CHECK-NEXT: vmv1r.v v7, v8
4075 ; CHECK-NEXT: vmv1r.v v9, v8
4076 ; CHECK-NEXT: vmv1r.v v10, v8
4077 ; CHECK-NEXT: vmv1r.v v11, v8
4078 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
4079 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
4080 ; CHECK-NEXT: csrr a0, vl
4081 ; CHECK-NEXT: sd a0, 0(a2)
4084 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg5ff.mask.nxv1f16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4085 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4086 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 5
4087 store i64 %2, ptr %outvl
4088 ret <vscale x 1 x half> %1
; vlseg6ff: 6-field fault-only-first segment load of <vscale x 1 x half>.
; Returns the 6 fields plus the post-fault vector length (i64).
4091 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg6ff.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr , i64)
4092 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg6ff.mask.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg6e16ff.v at e16/mf4, then csrr of vl stored to %outvl.
4094 define <vscale x 1 x half> @test_vlseg6ff_nxv1f16(ptr %base, i64 %vl, ptr %outvl) {
4095 ; CHECK-LABEL: test_vlseg6ff_nxv1f16:
4096 ; CHECK: # %bb.0: # %entry
4097 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
4098 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
4099 ; CHECK-NEXT: csrr a0, vl
4100 ; CHECK-NEXT: sd a0, 0(a2)
4103 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg6ff.nxv1f16(<vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, ptr %base, i64 %vl)
4104 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4105 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 6
4106 store i64 %2, ptr %outvl
4107 ret <vscale x 1 x half> %1
; Masked: %val (v8) merges all 6 fields, so it is copied into the other
; destination registers (v7, v9-v12); vsetvli uses the "mu" policy.
4110 define <vscale x 1 x half> @test_vlseg6ff_mask_nxv1f16(<vscale x 1 x half> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4111 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f16:
4112 ; CHECK: # %bb.0: # %entry
4113 ; CHECK-NEXT: vmv1r.v v7, v8
4114 ; CHECK-NEXT: vmv1r.v v9, v8
4115 ; CHECK-NEXT: vmv1r.v v10, v8
4116 ; CHECK-NEXT: vmv1r.v v11, v8
4117 ; CHECK-NEXT: vmv1r.v v12, v8
4118 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
4119 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
4120 ; CHECK-NEXT: csrr a0, vl
4121 ; CHECK-NEXT: sd a0, 0(a2)
4124 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg6ff.mask.nxv1f16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4125 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4126 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 6
4127 store i64 %2, ptr %outvl
4128 ret <vscale x 1 x half> %1
; vlseg7ff: 7-field fault-only-first segment load of <vscale x 1 x half>.
; Returns the 7 fields plus the post-fault vector length (i64).
4131 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg7ff.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr , i64)
4132 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg7ff.mask.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg7e16ff.v at e16/mf4, then csrr of vl stored to %outvl.
4134 define <vscale x 1 x half> @test_vlseg7ff_nxv1f16(ptr %base, i64 %vl, ptr %outvl) {
4135 ; CHECK-LABEL: test_vlseg7ff_nxv1f16:
4136 ; CHECK: # %bb.0: # %entry
4137 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
4138 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
4139 ; CHECK-NEXT: csrr a0, vl
4140 ; CHECK-NEXT: sd a0, 0(a2)
4143 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg7ff.nxv1f16(<vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, ptr %base, i64 %vl)
4144 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4145 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 7
4146 store i64 %2, ptr %outvl
4147 ret <vscale x 1 x half> %1
; Masked: %val (v8) merges all 7 fields, so it is copied into the other
; destination registers (v7, v9-v13); vsetvli uses the "mu" policy.
4150 define <vscale x 1 x half> @test_vlseg7ff_mask_nxv1f16(<vscale x 1 x half> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4151 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f16:
4152 ; CHECK: # %bb.0: # %entry
4153 ; CHECK-NEXT: vmv1r.v v7, v8
4154 ; CHECK-NEXT: vmv1r.v v9, v8
4155 ; CHECK-NEXT: vmv1r.v v10, v8
4156 ; CHECK-NEXT: vmv1r.v v11, v8
4157 ; CHECK-NEXT: vmv1r.v v12, v8
4158 ; CHECK-NEXT: vmv1r.v v13, v8
4159 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
4160 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
4161 ; CHECK-NEXT: csrr a0, vl
4162 ; CHECK-NEXT: sd a0, 0(a2)
4165 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg7ff.mask.nxv1f16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4166 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4167 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 7
4168 store i64 %2, ptr %outvl
4169 ret <vscale x 1 x half> %1
; vlseg8ff: 8-field fault-only-first segment load of <vscale x 1 x half>.
; Returns the 8 fields plus the post-fault vector length (i64).
4172 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg8ff.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr , i64)
4173 declare {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg8ff.mask.nxv1f16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg8e16ff.v at e16/mf4, then csrr of vl stored to %outvl.
4175 define <vscale x 1 x half> @test_vlseg8ff_nxv1f16(ptr %base, i64 %vl, ptr %outvl) {
4176 ; CHECK-LABEL: test_vlseg8ff_nxv1f16:
4177 ; CHECK: # %bb.0: # %entry
4178 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
4179 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
4180 ; CHECK-NEXT: csrr a0, vl
4181 ; CHECK-NEXT: sd a0, 0(a2)
4184 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg8ff.nxv1f16(<vscale x 1 x half> undef, <vscale x 1 x half> undef ,<vscale x 1 x half> undef ,<vscale x 1 x half> undef, <vscale x 1 x half> undef ,<vscale x 1 x half> undef, <vscale x 1 x half> undef, <vscale x 1 x half> undef, ptr %base, i64 %vl)
4185 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4186 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 8
4187 store i64 %2, ptr %outvl
4188 ret <vscale x 1 x half> %1
; Masked: %val (v8) merges all 8 fields, so it is copied into the other
; destination registers (v7, v9-v14); vsetvli uses the "mu" policy.
4191 define <vscale x 1 x half> @test_vlseg8ff_mask_nxv1f16(<vscale x 1 x half> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4192 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f16:
4193 ; CHECK: # %bb.0: # %entry
4194 ; CHECK-NEXT: vmv1r.v v7, v8
4195 ; CHECK-NEXT: vmv1r.v v9, v8
4196 ; CHECK-NEXT: vmv1r.v v10, v8
4197 ; CHECK-NEXT: vmv1r.v v11, v8
4198 ; CHECK-NEXT: vmv1r.v v12, v8
4199 ; CHECK-NEXT: vmv1r.v v13, v8
4200 ; CHECK-NEXT: vmv1r.v v14, v8
4201 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, mu
4202 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
4203 ; CHECK-NEXT: csrr a0, vl
4204 ; CHECK-NEXT: sd a0, 0(a2)
4207 %0 = tail call {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} @llvm.riscv.vlseg8ff.mask.nxv1f16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4208 %1 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 1
4209 %2 = extractvalue {<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, i64} %0, 8
4210 store i64 %2, ptr %outvl
4211 ret <vscale x 1 x half> %1
; vlseg2ff: 2-field fault-only-first segment load of <vscale x 1 x float>.
; Returns both fields plus the post-fault vector length (i64).
4214 declare {<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg2ff.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>, ptr , i64)
4215 declare {<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg2e32ff.v at e32/mf2, then csrr of vl stored to %outvl.
4217 define <vscale x 1 x float> @test_vlseg2ff_nxv1f32(ptr %base, i64 %vl, ptr %outvl) {
4218 ; CHECK-LABEL: test_vlseg2ff_nxv1f32:
4219 ; CHECK: # %bb.0: # %entry
4220 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
4221 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0)
4222 ; CHECK-NEXT: csrr a0, vl
4223 ; CHECK-NEXT: sd a0, 0(a2)
4226 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg2ff.nxv1f32(<vscale x 1 x float> undef, <vscale x 1 x float> undef, ptr %base, i64 %vl)
4227 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4228 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 2
4229 store i64 %2, ptr %outvl
4230 ret <vscale x 1 x float> %1
; Masked: %val (v8) merges both fields, so it is copied into v7; vsetvli uses
; the "mu" policy.
4233 define <vscale x 1 x float> @test_vlseg2ff_mask_nxv1f32(<vscale x 1 x float> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4234 ; CHECK-LABEL: test_vlseg2ff_mask_nxv1f32:
4235 ; CHECK: # %bb.0: # %entry
4236 ; CHECK-NEXT: vmv1r.v v7, v8
4237 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
4238 ; CHECK-NEXT: vlseg2e32ff.v v7, (a0), v0.t
4239 ; CHECK-NEXT: csrr a0, vl
4240 ; CHECK-NEXT: sd a0, 0(a2)
4243 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv1f32(<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4244 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4245 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 2
4246 store i64 %2, ptr %outvl
4247 ret <vscale x 1 x float> %1
; vlseg3ff: 3-field fault-only-first segment load of <vscale x 1 x float>.
; Returns the 3 fields plus the post-fault vector length (i64).
4250 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg3ff.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr , i64)
4251 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg3ff.mask.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg3e32ff.v at e32/mf2, then csrr of vl stored to %outvl.
4253 define <vscale x 1 x float> @test_vlseg3ff_nxv1f32(ptr %base, i64 %vl, ptr %outvl) {
4254 ; CHECK-LABEL: test_vlseg3ff_nxv1f32:
4255 ; CHECK: # %bb.0: # %entry
4256 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
4257 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0)
4258 ; CHECK-NEXT: csrr a0, vl
4259 ; CHECK-NEXT: sd a0, 0(a2)
4262 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg3ff.nxv1f32(<vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, ptr %base, i64 %vl)
4263 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4264 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 3
4265 store i64 %2, ptr %outvl
4266 ret <vscale x 1 x float> %1
; Masked: %val (v8) merges all 3 fields, so it is copied into the other
; destination registers (v7, v9); vsetvli uses the "mu" policy.
4269 define <vscale x 1 x float> @test_vlseg3ff_mask_nxv1f32(<vscale x 1 x float> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4270 ; CHECK-LABEL: test_vlseg3ff_mask_nxv1f32:
4271 ; CHECK: # %bb.0: # %entry
4272 ; CHECK-NEXT: vmv1r.v v7, v8
4273 ; CHECK-NEXT: vmv1r.v v9, v8
4274 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
4275 ; CHECK-NEXT: vlseg3e32ff.v v7, (a0), v0.t
4276 ; CHECK-NEXT: csrr a0, vl
4277 ; CHECK-NEXT: sd a0, 0(a2)
4280 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg3ff.mask.nxv1f32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4281 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4282 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 3
4283 store i64 %2, ptr %outvl
4284 ret <vscale x 1 x float> %1
; vlseg4ff: 4-field fault-only-first segment load of <vscale x 1 x float>.
; Returns the 4 fields plus the post-fault vector length (i64).
4287 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg4ff.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr , i64)
4288 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg4ff.mask.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg4e32ff.v at e32/mf2, then csrr of vl stored to %outvl.
4290 define <vscale x 1 x float> @test_vlseg4ff_nxv1f32(ptr %base, i64 %vl, ptr %outvl) {
4291 ; CHECK-LABEL: test_vlseg4ff_nxv1f32:
4292 ; CHECK: # %bb.0: # %entry
4293 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
4294 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0)
4295 ; CHECK-NEXT: csrr a0, vl
4296 ; CHECK-NEXT: sd a0, 0(a2)
4299 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg4ff.nxv1f32(<vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, ptr %base, i64 %vl)
4300 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4301 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 4
4302 store i64 %2, ptr %outvl
4303 ret <vscale x 1 x float> %1
; Masked: %val (v8) merges all 4 fields, so it is copied into the other
; destination registers (v7, v9, v10); vsetvli uses the "mu" policy.
4306 define <vscale x 1 x float> @test_vlseg4ff_mask_nxv1f32(<vscale x 1 x float> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4307 ; CHECK-LABEL: test_vlseg4ff_mask_nxv1f32:
4308 ; CHECK: # %bb.0: # %entry
4309 ; CHECK-NEXT: vmv1r.v v7, v8
4310 ; CHECK-NEXT: vmv1r.v v9, v8
4311 ; CHECK-NEXT: vmv1r.v v10, v8
4312 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
4313 ; CHECK-NEXT: vlseg4e32ff.v v7, (a0), v0.t
4314 ; CHECK-NEXT: csrr a0, vl
4315 ; CHECK-NEXT: sd a0, 0(a2)
4318 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg4ff.mask.nxv1f32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4319 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4320 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 4
4321 store i64 %2, ptr %outvl
4322 ret <vscale x 1 x float> %1
; vlseg5ff: 5-field fault-only-first segment load of <vscale x 1 x float>.
; Returns the 5 fields plus the post-fault vector length (i64).
4325 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg5ff.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr , i64)
4326 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg5ff.mask.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg5e32ff.v at e32/mf2, then csrr of vl stored to %outvl.
4328 define <vscale x 1 x float> @test_vlseg5ff_nxv1f32(ptr %base, i64 %vl, ptr %outvl) {
4329 ; CHECK-LABEL: test_vlseg5ff_nxv1f32:
4330 ; CHECK: # %bb.0: # %entry
4331 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
4332 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0)
4333 ; CHECK-NEXT: csrr a0, vl
4334 ; CHECK-NEXT: sd a0, 0(a2)
4337 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg5ff.nxv1f32(<vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, ptr %base, i64 %vl)
4338 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4339 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 5
4340 store i64 %2, ptr %outvl
4341 ret <vscale x 1 x float> %1
; Masked: %val (v8) merges all 5 fields, so it is copied into the other
; destination registers (v7, v9-v11); vsetvli uses the "mu" policy.
4344 define <vscale x 1 x float> @test_vlseg5ff_mask_nxv1f32(<vscale x 1 x float> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4345 ; CHECK-LABEL: test_vlseg5ff_mask_nxv1f32:
4346 ; CHECK: # %bb.0: # %entry
4347 ; CHECK-NEXT: vmv1r.v v7, v8
4348 ; CHECK-NEXT: vmv1r.v v9, v8
4349 ; CHECK-NEXT: vmv1r.v v10, v8
4350 ; CHECK-NEXT: vmv1r.v v11, v8
4351 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
4352 ; CHECK-NEXT: vlseg5e32ff.v v7, (a0), v0.t
4353 ; CHECK-NEXT: csrr a0, vl
4354 ; CHECK-NEXT: sd a0, 0(a2)
4357 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg5ff.mask.nxv1f32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4358 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4359 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 5
4360 store i64 %2, ptr %outvl
4361 ret <vscale x 1 x float> %1
; vlseg6ff: 6-field fault-only-first segment load of <vscale x 1 x float>.
; Returns the 6 fields plus the post-fault vector length (i64).
4364 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg6ff.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr , i64)
4365 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg6ff.mask.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i1>, i64, i64)
; Unmasked: expect vlseg6e32ff.v at e32/mf2, then csrr of vl stored to %outvl.
4367 define <vscale x 1 x float> @test_vlseg6ff_nxv1f32(ptr %base, i64 %vl, ptr %outvl) {
4368 ; CHECK-LABEL: test_vlseg6ff_nxv1f32:
4369 ; CHECK: # %bb.0: # %entry
4370 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
4371 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0)
4372 ; CHECK-NEXT: csrr a0, vl
4373 ; CHECK-NEXT: sd a0, 0(a2)
4376 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg6ff.nxv1f32(<vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, ptr %base, i64 %vl)
4377 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4378 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 6
4379 store i64 %2, ptr %outvl
4380 ret <vscale x 1 x float> %1
; Masked: %val (v8) merges all 6 fields, so it is copied into the other
; destination registers (v7, v9-v12); vsetvli uses the "mu" policy.
4383 define <vscale x 1 x float> @test_vlseg6ff_mask_nxv1f32(<vscale x 1 x float> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4384 ; CHECK-LABEL: test_vlseg6ff_mask_nxv1f32:
4385 ; CHECK: # %bb.0: # %entry
4386 ; CHECK-NEXT: vmv1r.v v7, v8
4387 ; CHECK-NEXT: vmv1r.v v9, v8
4388 ; CHECK-NEXT: vmv1r.v v10, v8
4389 ; CHECK-NEXT: vmv1r.v v11, v8
4390 ; CHECK-NEXT: vmv1r.v v12, v8
4391 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
4392 ; CHECK-NEXT: vlseg6e32ff.v v7, (a0), v0.t
4393 ; CHECK-NEXT: csrr a0, vl
4394 ; CHECK-NEXT: sd a0, 0(a2)
4397 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg6ff.mask.nxv1f32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4398 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4399 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 6
4400 store i64 %2, ptr %outvl
4401 ret <vscale x 1 x float> %1
4404 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg7ff.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr , i64)
4405 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg7ff.mask.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i1>, i64, i64)
4407 define <vscale x 1 x float> @test_vlseg7ff_nxv1f32(ptr %base, i64 %vl, ptr %outvl) {
4408 ; CHECK-LABEL: test_vlseg7ff_nxv1f32:
4409 ; CHECK: # %bb.0: # %entry
4410 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
4411 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0)
4412 ; CHECK-NEXT: csrr a0, vl
4413 ; CHECK-NEXT: sd a0, 0(a2)
4416 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg7ff.nxv1f32(<vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, ptr %base, i64 %vl)
4417 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4418 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 7
4419 store i64 %2, ptr %outvl
4420 ret <vscale x 1 x float> %1
4423 define <vscale x 1 x float> @test_vlseg7ff_mask_nxv1f32(<vscale x 1 x float> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4424 ; CHECK-LABEL: test_vlseg7ff_mask_nxv1f32:
4425 ; CHECK: # %bb.0: # %entry
4426 ; CHECK-NEXT: vmv1r.v v7, v8
4427 ; CHECK-NEXT: vmv1r.v v9, v8
4428 ; CHECK-NEXT: vmv1r.v v10, v8
4429 ; CHECK-NEXT: vmv1r.v v11, v8
4430 ; CHECK-NEXT: vmv1r.v v12, v8
4431 ; CHECK-NEXT: vmv1r.v v13, v8
4432 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
4433 ; CHECK-NEXT: vlseg7e32ff.v v7, (a0), v0.t
4434 ; CHECK-NEXT: csrr a0, vl
4435 ; CHECK-NEXT: sd a0, 0(a2)
4438 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg7ff.mask.nxv1f32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4439 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4440 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 7
4441 store i64 %2, ptr %outvl
4442 ret <vscale x 1 x float> %1
4445 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg8ff.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr , i64)
4446 declare {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg8ff.mask.nxv1f32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i1>, i64, i64)
4448 define <vscale x 1 x float> @test_vlseg8ff_nxv1f32(ptr %base, i64 %vl, ptr %outvl) {
4449 ; CHECK-LABEL: test_vlseg8ff_nxv1f32:
4450 ; CHECK: # %bb.0: # %entry
4451 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
4452 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0)
4453 ; CHECK-NEXT: csrr a0, vl
4454 ; CHECK-NEXT: sd a0, 0(a2)
4457 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg8ff.nxv1f32(<vscale x 1 x float> undef, <vscale x 1 x float> undef ,<vscale x 1 x float> undef ,<vscale x 1 x float> undef, <vscale x 1 x float> undef ,<vscale x 1 x float> undef, <vscale x 1 x float> undef, <vscale x 1 x float> undef, ptr %base, i64 %vl)
4458 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4459 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 8
4460 store i64 %2, ptr %outvl
4461 ret <vscale x 1 x float> %1
4464 define <vscale x 1 x float> @test_vlseg8ff_mask_nxv1f32(<vscale x 1 x float> %val, ptr %base, i64 %vl, <vscale x 1 x i1> %mask, ptr %outvl) {
4465 ; CHECK-LABEL: test_vlseg8ff_mask_nxv1f32:
4466 ; CHECK: # %bb.0: # %entry
4467 ; CHECK-NEXT: vmv1r.v v7, v8
4468 ; CHECK-NEXT: vmv1r.v v9, v8
4469 ; CHECK-NEXT: vmv1r.v v10, v8
4470 ; CHECK-NEXT: vmv1r.v v11, v8
4471 ; CHECK-NEXT: vmv1r.v v12, v8
4472 ; CHECK-NEXT: vmv1r.v v13, v8
4473 ; CHECK-NEXT: vmv1r.v v14, v8
4474 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, mu
4475 ; CHECK-NEXT: vlseg8e32ff.v v7, (a0), v0.t
4476 ; CHECK-NEXT: csrr a0, vl
4477 ; CHECK-NEXT: sd a0, 0(a2)
4480 %0 = tail call {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} @llvm.riscv.vlseg8ff.mask.nxv1f32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i1> %mask, i64 %vl, i64 1)
4481 %1 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 1
4482 %2 = extractvalue {<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, i64} %0, 8
4483 store i64 %2, ptr %outvl
4484 ret <vscale x 1 x float> %1
4487 declare {<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg2ff.nxv8f16(<vscale x 8 x half>,<vscale x 8 x half>, ptr , i64)
4488 declare {<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv8f16(<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i1>, i64, i64)
4490 define <vscale x 8 x half> @test_vlseg2ff_nxv8f16(ptr %base, i64 %vl, ptr %outvl) {
4491 ; CHECK-LABEL: test_vlseg2ff_nxv8f16:
4492 ; CHECK: # %bb.0: # %entry
4493 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
4494 ; CHECK-NEXT: vlseg2e16ff.v v6, (a0)
4495 ; CHECK-NEXT: csrr a0, vl
4496 ; CHECK-NEXT: sd a0, 0(a2)
4499 %0 = tail call {<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg2ff.nxv8f16(<vscale x 8 x half> undef, <vscale x 8 x half> undef, ptr %base, i64 %vl)
4500 %1 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 1
4501 %2 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 2
4502 store i64 %2, ptr %outvl
4503 ret <vscale x 8 x half> %1
4506 define <vscale x 8 x half> @test_vlseg2ff_mask_nxv8f16(<vscale x 8 x half> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
4507 ; CHECK-LABEL: test_vlseg2ff_mask_nxv8f16:
4508 ; CHECK: # %bb.0: # %entry
4509 ; CHECK-NEXT: vmv2r.v v6, v8
4510 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
4511 ; CHECK-NEXT: vlseg2e16ff.v v6, (a0), v0.t
4512 ; CHECK-NEXT: csrr a0, vl
4513 ; CHECK-NEXT: sd a0, 0(a2)
4516 %0 = tail call {<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv8f16(<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
4517 %1 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 1
4518 %2 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 2
4519 store i64 %2, ptr %outvl
4520 ret <vscale x 8 x half> %1
4523 declare {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg3ff.nxv8f16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr , i64)
4524 declare {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv8f16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i1>, i64, i64)
4526 define <vscale x 8 x half> @test_vlseg3ff_nxv8f16(ptr %base, i64 %vl, ptr %outvl) {
4527 ; CHECK-LABEL: test_vlseg3ff_nxv8f16:
4528 ; CHECK: # %bb.0: # %entry
4529 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
4530 ; CHECK-NEXT: vlseg3e16ff.v v6, (a0)
4531 ; CHECK-NEXT: csrr a0, vl
4532 ; CHECK-NEXT: sd a0, 0(a2)
4535 %0 = tail call {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg3ff.nxv8f16(<vscale x 8 x half> undef, <vscale x 8 x half> undef, <vscale x 8 x half> undef, ptr %base, i64 %vl)
4536 %1 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 1
4537 %2 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 3
4538 store i64 %2, ptr %outvl
4539 ret <vscale x 8 x half> %1
4542 define <vscale x 8 x half> @test_vlseg3ff_mask_nxv8f16(<vscale x 8 x half> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
4543 ; CHECK-LABEL: test_vlseg3ff_mask_nxv8f16:
4544 ; CHECK: # %bb.0: # %entry
4545 ; CHECK-NEXT: vmv2r.v v6, v8
4546 ; CHECK-NEXT: vmv2r.v v10, v8
4547 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
4548 ; CHECK-NEXT: vlseg3e16ff.v v6, (a0), v0.t
4549 ; CHECK-NEXT: csrr a0, vl
4550 ; CHECK-NEXT: sd a0, 0(a2)
4553 %0 = tail call {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv8f16(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
4554 %1 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 1
4555 %2 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 3
4556 store i64 %2, ptr %outvl
4557 ret <vscale x 8 x half> %1
4560 declare {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg4ff.nxv8f16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr , i64)
4561 declare {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv8f16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i1>, i64, i64)
4563 define <vscale x 8 x half> @test_vlseg4ff_nxv8f16(ptr %base, i64 %vl, ptr %outvl) {
4564 ; CHECK-LABEL: test_vlseg4ff_nxv8f16:
4565 ; CHECK: # %bb.0: # %entry
4566 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
4567 ; CHECK-NEXT: vlseg4e16ff.v v6, (a0)
4568 ; CHECK-NEXT: csrr a0, vl
4569 ; CHECK-NEXT: sd a0, 0(a2)
4572 %0 = tail call {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg4ff.nxv8f16(<vscale x 8 x half> undef, <vscale x 8 x half> undef, <vscale x 8 x half> undef, <vscale x 8 x half> undef, ptr %base, i64 %vl)
4573 %1 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 1
4574 %2 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 4
4575 store i64 %2, ptr %outvl
4576 ret <vscale x 8 x half> %1
4579 define <vscale x 8 x half> @test_vlseg4ff_mask_nxv8f16(<vscale x 8 x half> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
4580 ; CHECK-LABEL: test_vlseg4ff_mask_nxv8f16:
4581 ; CHECK: # %bb.0: # %entry
4582 ; CHECK-NEXT: vmv2r.v v6, v8
4583 ; CHECK-NEXT: vmv2r.v v10, v8
4584 ; CHECK-NEXT: vmv2r.v v12, v8
4585 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, mu
4586 ; CHECK-NEXT: vlseg4e16ff.v v6, (a0), v0.t
4587 ; CHECK-NEXT: csrr a0, vl
4588 ; CHECK-NEXT: sd a0, 0(a2)
4591 %0 = tail call {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv8f16(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
4592 %1 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 1
4593 %2 = extractvalue {<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, i64} %0, 4
4594 store i64 %2, ptr %outvl
4595 ret <vscale x 8 x half> %1
4598 declare {<vscale x 8 x float>,<vscale x 8 x float>, i64} @llvm.riscv.vlseg2ff.nxv8f32(<vscale x 8 x float>,<vscale x 8 x float>, ptr , i64)
4599 declare {<vscale x 8 x float>,<vscale x 8 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv8f32(<vscale x 8 x float>,<vscale x 8 x float>, ptr, <vscale x 8 x i1>, i64, i64)
4601 define <vscale x 8 x float> @test_vlseg2ff_nxv8f32(ptr %base, i64 %vl, ptr %outvl) {
4602 ; CHECK-LABEL: test_vlseg2ff_nxv8f32:
4603 ; CHECK: # %bb.0: # %entry
4604 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
4605 ; CHECK-NEXT: vlseg2e32ff.v v4, (a0)
4606 ; CHECK-NEXT: csrr a0, vl
4607 ; CHECK-NEXT: sd a0, 0(a2)
4610 %0 = tail call {<vscale x 8 x float>,<vscale x 8 x float>, i64} @llvm.riscv.vlseg2ff.nxv8f32(<vscale x 8 x float> undef, <vscale x 8 x float> undef, ptr %base, i64 %vl)
4611 %1 = extractvalue {<vscale x 8 x float>,<vscale x 8 x float>, i64} %0, 1
4612 %2 = extractvalue {<vscale x 8 x float>,<vscale x 8 x float>, i64} %0, 2
4613 store i64 %2, ptr %outvl
4614 ret <vscale x 8 x float> %1
4617 define <vscale x 8 x float> @test_vlseg2ff_mask_nxv8f32(<vscale x 8 x float> %val, ptr %base, i64 %vl, <vscale x 8 x i1> %mask, ptr %outvl) {
4618 ; CHECK-LABEL: test_vlseg2ff_mask_nxv8f32:
4619 ; CHECK: # %bb.0: # %entry
4620 ; CHECK-NEXT: vmv4r.v v4, v8
4621 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, mu
4622 ; CHECK-NEXT: vlseg2e32ff.v v4, (a0), v0.t
4623 ; CHECK-NEXT: csrr a0, vl
4624 ; CHECK-NEXT: sd a0, 0(a2)
4627 %0 = tail call {<vscale x 8 x float>,<vscale x 8 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv8f32(<vscale x 8 x float> %val,<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i1> %mask, i64 %vl, i64 1)
4628 %1 = extractvalue {<vscale x 8 x float>,<vscale x 8 x float>, i64} %0, 1
4629 %2 = extractvalue {<vscale x 8 x float>,<vscale x 8 x float>, i64} %0, 2
4630 store i64 %2, ptr %outvl
4631 ret <vscale x 8 x float> %1
4634 declare {<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg2ff.nxv2f64(<vscale x 2 x double>,<vscale x 2 x double>, ptr , i64)
4635 declare {<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg2ff.mask.nxv2f64(<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i1>, i64, i64)
4637 define <vscale x 2 x double> @test_vlseg2ff_nxv2f64(ptr %base, i64 %vl, ptr %outvl) {
4638 ; CHECK-LABEL: test_vlseg2ff_nxv2f64:
4639 ; CHECK: # %bb.0: # %entry
4640 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
4641 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0)
4642 ; CHECK-NEXT: csrr a0, vl
4643 ; CHECK-NEXT: sd a0, 0(a2)
4646 %0 = tail call {<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg2ff.nxv2f64(<vscale x 2 x double> undef, <vscale x 2 x double> undef, ptr %base, i64 %vl)
4647 %1 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 1
4648 %2 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 2
4649 store i64 %2, ptr %outvl
4650 ret <vscale x 2 x double> %1
4653 define <vscale x 2 x double> @test_vlseg2ff_mask_nxv2f64(<vscale x 2 x double> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
4654 ; CHECK-LABEL: test_vlseg2ff_mask_nxv2f64:
4655 ; CHECK: # %bb.0: # %entry
4656 ; CHECK-NEXT: vmv2r.v v6, v8
4657 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
4658 ; CHECK-NEXT: vlseg2e64ff.v v6, (a0), v0.t
4659 ; CHECK-NEXT: csrr a0, vl
4660 ; CHECK-NEXT: sd a0, 0(a2)
4663 %0 = tail call {<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg2ff.mask.nxv2f64(<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
4664 %1 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 1
4665 %2 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 2
4666 store i64 %2, ptr %outvl
4667 ret <vscale x 2 x double> %1
4670 declare {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg3ff.nxv2f64(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr , i64)
4671 declare {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg3ff.mask.nxv2f64(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i1>, i64, i64)
4673 define <vscale x 2 x double> @test_vlseg3ff_nxv2f64(ptr %base, i64 %vl, ptr %outvl) {
4674 ; CHECK-LABEL: test_vlseg3ff_nxv2f64:
4675 ; CHECK: # %bb.0: # %entry
4676 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
4677 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0)
4678 ; CHECK-NEXT: csrr a0, vl
4679 ; CHECK-NEXT: sd a0, 0(a2)
4682 %0 = tail call {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg3ff.nxv2f64(<vscale x 2 x double> undef, <vscale x 2 x double> undef, <vscale x 2 x double> undef, ptr %base, i64 %vl)
4683 %1 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 1
4684 %2 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 3
4685 store i64 %2, ptr %outvl
4686 ret <vscale x 2 x double> %1
4689 define <vscale x 2 x double> @test_vlseg3ff_mask_nxv2f64(<vscale x 2 x double> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
4690 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f64:
4691 ; CHECK: # %bb.0: # %entry
4692 ; CHECK-NEXT: vmv2r.v v6, v8
4693 ; CHECK-NEXT: vmv2r.v v10, v8
4694 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
4695 ; CHECK-NEXT: vlseg3e64ff.v v6, (a0), v0.t
4696 ; CHECK-NEXT: csrr a0, vl
4697 ; CHECK-NEXT: sd a0, 0(a2)
4700 %0 = tail call {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg3ff.mask.nxv2f64(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
4701 %1 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 1
4702 %2 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 3
4703 store i64 %2, ptr %outvl
4704 ret <vscale x 2 x double> %1
4707 declare {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg4ff.nxv2f64(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr , i64)
4708 declare {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg4ff.mask.nxv2f64(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i1>, i64, i64)
4710 define <vscale x 2 x double> @test_vlseg4ff_nxv2f64(ptr %base, i64 %vl, ptr %outvl) {
4711 ; CHECK-LABEL: test_vlseg4ff_nxv2f64:
4712 ; CHECK: # %bb.0: # %entry
4713 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
4714 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0)
4715 ; CHECK-NEXT: csrr a0, vl
4716 ; CHECK-NEXT: sd a0, 0(a2)
4719 %0 = tail call {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg4ff.nxv2f64(<vscale x 2 x double> undef, <vscale x 2 x double> undef, <vscale x 2 x double> undef, <vscale x 2 x double> undef, ptr %base, i64 %vl)
4720 %1 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 1
4721 %2 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 4
4722 store i64 %2, ptr %outvl
4723 ret <vscale x 2 x double> %1
4726 define <vscale x 2 x double> @test_vlseg4ff_mask_nxv2f64(<vscale x 2 x double> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
4727 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f64:
4728 ; CHECK: # %bb.0: # %entry
4729 ; CHECK-NEXT: vmv2r.v v6, v8
4730 ; CHECK-NEXT: vmv2r.v v10, v8
4731 ; CHECK-NEXT: vmv2r.v v12, v8
4732 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, mu
4733 ; CHECK-NEXT: vlseg4e64ff.v v6, (a0), v0.t
4734 ; CHECK-NEXT: csrr a0, vl
4735 ; CHECK-NEXT: sd a0, 0(a2)
4738 %0 = tail call {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} @llvm.riscv.vlseg4ff.mask.nxv2f64(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
4739 %1 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 1
4740 %2 = extractvalue {<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, i64} %0, 4
4741 store i64 %2, ptr %outvl
4742 ret <vscale x 2 x double> %1
4745 declare {<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg2ff.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>, ptr , i64)
4746 declare {<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i1>, i64, i64)
4748 define <vscale x 4 x half> @test_vlseg2ff_nxv4f16(ptr %base, i64 %vl, ptr %outvl) {
4749 ; CHECK-LABEL: test_vlseg2ff_nxv4f16:
4750 ; CHECK: # %bb.0: # %entry
4751 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
4752 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
4753 ; CHECK-NEXT: csrr a0, vl
4754 ; CHECK-NEXT: sd a0, 0(a2)
4757 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg2ff.nxv4f16(<vscale x 4 x half> undef, <vscale x 4 x half> undef, ptr %base, i64 %vl)
4758 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4759 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 2
4760 store i64 %2, ptr %outvl
4761 ret <vscale x 4 x half> %1
4764 define <vscale x 4 x half> @test_vlseg2ff_mask_nxv4f16(<vscale x 4 x half> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
4765 ; CHECK-LABEL: test_vlseg2ff_mask_nxv4f16:
4766 ; CHECK: # %bb.0: # %entry
4767 ; CHECK-NEXT: vmv1r.v v7, v8
4768 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
4769 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
4770 ; CHECK-NEXT: csrr a0, vl
4771 ; CHECK-NEXT: sd a0, 0(a2)
4774 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv4f16(<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
4775 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4776 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 2
4777 store i64 %2, ptr %outvl
4778 ret <vscale x 4 x half> %1
4781 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg3ff.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr , i64)
4782 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i1>, i64, i64)
4784 define <vscale x 4 x half> @test_vlseg3ff_nxv4f16(ptr %base, i64 %vl, ptr %outvl) {
4785 ; CHECK-LABEL: test_vlseg3ff_nxv4f16:
4786 ; CHECK: # %bb.0: # %entry
4787 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
4788 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
4789 ; CHECK-NEXT: csrr a0, vl
4790 ; CHECK-NEXT: sd a0, 0(a2)
4793 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg3ff.nxv4f16(<vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, ptr %base, i64 %vl)
4794 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4795 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 3
4796 store i64 %2, ptr %outvl
4797 ret <vscale x 4 x half> %1
4800 define <vscale x 4 x half> @test_vlseg3ff_mask_nxv4f16(<vscale x 4 x half> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
4801 ; CHECK-LABEL: test_vlseg3ff_mask_nxv4f16:
4802 ; CHECK: # %bb.0: # %entry
4803 ; CHECK-NEXT: vmv1r.v v7, v8
4804 ; CHECK-NEXT: vmv1r.v v9, v8
4805 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
4806 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
4807 ; CHECK-NEXT: csrr a0, vl
4808 ; CHECK-NEXT: sd a0, 0(a2)
4811 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv4f16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
4812 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4813 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 3
4814 store i64 %2, ptr %outvl
4815 ret <vscale x 4 x half> %1
4818 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg4ff.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr , i64)
4819 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i1>, i64, i64)
4821 define <vscale x 4 x half> @test_vlseg4ff_nxv4f16(ptr %base, i64 %vl, ptr %outvl) {
4822 ; CHECK-LABEL: test_vlseg4ff_nxv4f16:
4823 ; CHECK: # %bb.0: # %entry
4824 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
4825 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
4826 ; CHECK-NEXT: csrr a0, vl
4827 ; CHECK-NEXT: sd a0, 0(a2)
4830 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg4ff.nxv4f16(<vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, ptr %base, i64 %vl)
4831 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4832 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 4
4833 store i64 %2, ptr %outvl
4834 ret <vscale x 4 x half> %1
4837 define <vscale x 4 x half> @test_vlseg4ff_mask_nxv4f16(<vscale x 4 x half> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
4838 ; CHECK-LABEL: test_vlseg4ff_mask_nxv4f16:
4839 ; CHECK: # %bb.0: # %entry
4840 ; CHECK-NEXT: vmv1r.v v7, v8
4841 ; CHECK-NEXT: vmv1r.v v9, v8
4842 ; CHECK-NEXT: vmv1r.v v10, v8
4843 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
4844 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
4845 ; CHECK-NEXT: csrr a0, vl
4846 ; CHECK-NEXT: sd a0, 0(a2)
4849 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv4f16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
4850 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4851 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 4
4852 store i64 %2, ptr %outvl
4853 ret <vscale x 4 x half> %1
4856 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg5ff.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr , i64)
; vlseg5ff nxv4f16: 5-field fault-only-first segment load at e16/m1. The plain
; test passes undef passthru operands; the masked test passes %val as every
; passthru (copied into v7,v9-v11 around the v8 destination slot) with policy
; operand 1 (mu in the vsetvli). Both read the post-fault vl via csrr and
; store it to %outvl.
4857 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg5ff.mask.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i1>, i64, i64)
4859 define <vscale x 4 x half> @test_vlseg5ff_nxv4f16(ptr %base, i64 %vl, ptr %outvl) {
4860 ; CHECK-LABEL: test_vlseg5ff_nxv4f16:
4861 ; CHECK: # %bb.0: # %entry
4862 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
4863 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
4864 ; CHECK-NEXT: csrr a0, vl
4865 ; CHECK-NEXT: sd a0, 0(a2)
4868 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg5ff.nxv4f16(<vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, ptr %base, i64 %vl)
4869 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4870 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 5
4871 store i64 %2, ptr %outvl
4872 ret <vscale x 4 x half> %1
4875 define <vscale x 4 x half> @test_vlseg5ff_mask_nxv4f16(<vscale x 4 x half> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
4876 ; CHECK-LABEL: test_vlseg5ff_mask_nxv4f16:
4877 ; CHECK: # %bb.0: # %entry
4878 ; CHECK-NEXT: vmv1r.v v7, v8
4879 ; CHECK-NEXT: vmv1r.v v9, v8
4880 ; CHECK-NEXT: vmv1r.v v10, v8
4881 ; CHECK-NEXT: vmv1r.v v11, v8
4882 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
4883 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
4884 ; CHECK-NEXT: csrr a0, vl
4885 ; CHECK-NEXT: sd a0, 0(a2)
4888 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg5ff.mask.nxv4f16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
4889 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4890 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 5
4891 store i64 %2, ptr %outvl
4892 ret <vscale x 4 x half> %1
; vlseg6ff nxv4f16: 6-field fault-only-first segment load at e16/m1; masked
; variant seeds v7,v9-v12 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
4895 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg6ff.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr , i64)
4896 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg6ff.mask.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i1>, i64, i64)
4898 define <vscale x 4 x half> @test_vlseg6ff_nxv4f16(ptr %base, i64 %vl, ptr %outvl) {
4899 ; CHECK-LABEL: test_vlseg6ff_nxv4f16:
4900 ; CHECK: # %bb.0: # %entry
4901 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
4902 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
4903 ; CHECK-NEXT: csrr a0, vl
4904 ; CHECK-NEXT: sd a0, 0(a2)
4907 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg6ff.nxv4f16(<vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, ptr %base, i64 %vl)
4908 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4909 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 6
4910 store i64 %2, ptr %outvl
4911 ret <vscale x 4 x half> %1
4914 define <vscale x 4 x half> @test_vlseg6ff_mask_nxv4f16(<vscale x 4 x half> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
4915 ; CHECK-LABEL: test_vlseg6ff_mask_nxv4f16:
4916 ; CHECK: # %bb.0: # %entry
4917 ; CHECK-NEXT: vmv1r.v v7, v8
4918 ; CHECK-NEXT: vmv1r.v v9, v8
4919 ; CHECK-NEXT: vmv1r.v v10, v8
4920 ; CHECK-NEXT: vmv1r.v v11, v8
4921 ; CHECK-NEXT: vmv1r.v v12, v8
4922 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
4923 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
4924 ; CHECK-NEXT: csrr a0, vl
4925 ; CHECK-NEXT: sd a0, 0(a2)
4928 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg6ff.mask.nxv4f16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
4929 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4930 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 6
4931 store i64 %2, ptr %outvl
4932 ret <vscale x 4 x half> %1
; vlseg7ff nxv4f16: 7-field fault-only-first segment load at e16/m1; masked
; variant seeds v7,v9-v13 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
4935 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg7ff.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr , i64)
4936 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg7ff.mask.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i1>, i64, i64)
4938 define <vscale x 4 x half> @test_vlseg7ff_nxv4f16(ptr %base, i64 %vl, ptr %outvl) {
4939 ; CHECK-LABEL: test_vlseg7ff_nxv4f16:
4940 ; CHECK: # %bb.0: # %entry
4941 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
4942 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
4943 ; CHECK-NEXT: csrr a0, vl
4944 ; CHECK-NEXT: sd a0, 0(a2)
4947 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg7ff.nxv4f16(<vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, ptr %base, i64 %vl)
4948 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4949 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 7
4950 store i64 %2, ptr %outvl
4951 ret <vscale x 4 x half> %1
4954 define <vscale x 4 x half> @test_vlseg7ff_mask_nxv4f16(<vscale x 4 x half> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
4955 ; CHECK-LABEL: test_vlseg7ff_mask_nxv4f16:
4956 ; CHECK: # %bb.0: # %entry
4957 ; CHECK-NEXT: vmv1r.v v7, v8
4958 ; CHECK-NEXT: vmv1r.v v9, v8
4959 ; CHECK-NEXT: vmv1r.v v10, v8
4960 ; CHECK-NEXT: vmv1r.v v11, v8
4961 ; CHECK-NEXT: vmv1r.v v12, v8
4962 ; CHECK-NEXT: vmv1r.v v13, v8
4963 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
4964 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
4965 ; CHECK-NEXT: csrr a0, vl
4966 ; CHECK-NEXT: sd a0, 0(a2)
4969 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg7ff.mask.nxv4f16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
4970 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4971 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 7
4972 store i64 %2, ptr %outvl
4973 ret <vscale x 4 x half> %1
; vlseg8ff nxv4f16: 8-field fault-only-first segment load at e16/m1; masked
; variant seeds v7,v9-v14 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
4976 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg8ff.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr , i64)
4977 declare {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg8ff.mask.nxv4f16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i1>, i64, i64)
4979 define <vscale x 4 x half> @test_vlseg8ff_nxv4f16(ptr %base, i64 %vl, ptr %outvl) {
4980 ; CHECK-LABEL: test_vlseg8ff_nxv4f16:
4981 ; CHECK: # %bb.0: # %entry
4982 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
4983 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
4984 ; CHECK-NEXT: csrr a0, vl
4985 ; CHECK-NEXT: sd a0, 0(a2)
4988 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg8ff.nxv4f16(<vscale x 4 x half> undef, <vscale x 4 x half> undef ,<vscale x 4 x half> undef ,<vscale x 4 x half> undef, <vscale x 4 x half> undef ,<vscale x 4 x half> undef, <vscale x 4 x half> undef, <vscale x 4 x half> undef, ptr %base, i64 %vl)
4989 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
4990 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 8
4991 store i64 %2, ptr %outvl
4992 ret <vscale x 4 x half> %1
4995 define <vscale x 4 x half> @test_vlseg8ff_mask_nxv4f16(<vscale x 4 x half> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
4996 ; CHECK-LABEL: test_vlseg8ff_mask_nxv4f16:
4997 ; CHECK: # %bb.0: # %entry
4998 ; CHECK-NEXT: vmv1r.v v7, v8
4999 ; CHECK-NEXT: vmv1r.v v9, v8
5000 ; CHECK-NEXT: vmv1r.v v10, v8
5001 ; CHECK-NEXT: vmv1r.v v11, v8
5002 ; CHECK-NEXT: vmv1r.v v12, v8
5003 ; CHECK-NEXT: vmv1r.v v13, v8
5004 ; CHECK-NEXT: vmv1r.v v14, v8
5005 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, mu
5006 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
5007 ; CHECK-NEXT: csrr a0, vl
5008 ; CHECK-NEXT: sd a0, 0(a2)
5011 %0 = tail call {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} @llvm.riscv.vlseg8ff.mask.nxv4f16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
5012 %1 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 1
5013 %2 = extractvalue {<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, i64} %0, 8
5014 store i64 %2, ptr %outvl
5015 ret <vscale x 4 x half> %1
; vlseg2ff nxv2f16: 2-field fault-only-first segment load at e16/mf2; masked
; variant seeds v7 from %val and uses policy operand 1 (mu). Post-fault vl is
; read with csrr and stored to %outvl.
5018 declare {<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg2ff.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>, ptr , i64)
5019 declare {<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i1>, i64, i64)
5021 define <vscale x 2 x half> @test_vlseg2ff_nxv2f16(ptr %base, i64 %vl, ptr %outvl) {
5022 ; CHECK-LABEL: test_vlseg2ff_nxv2f16:
5023 ; CHECK: # %bb.0: # %entry
5024 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
5025 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0)
5026 ; CHECK-NEXT: csrr a0, vl
5027 ; CHECK-NEXT: sd a0, 0(a2)
5030 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg2ff.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, ptr %base, i64 %vl)
5031 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5032 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 2
5033 store i64 %2, ptr %outvl
5034 ret <vscale x 2 x half> %1
5037 define <vscale x 2 x half> @test_vlseg2ff_mask_nxv2f16(<vscale x 2 x half> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
5038 ; CHECK-LABEL: test_vlseg2ff_mask_nxv2f16:
5039 ; CHECK: # %bb.0: # %entry
5040 ; CHECK-NEXT: vmv1r.v v7, v8
5041 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
5042 ; CHECK-NEXT: vlseg2e16ff.v v7, (a0), v0.t
5043 ; CHECK-NEXT: csrr a0, vl
5044 ; CHECK-NEXT: sd a0, 0(a2)
5047 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg2ff.mask.nxv2f16(<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
5048 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5049 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 2
5050 store i64 %2, ptr %outvl
5051 ret <vscale x 2 x half> %1
; vlseg3ff nxv2f16: 3-field fault-only-first segment load at e16/mf2; masked
; variant seeds v7,v9 from %val and uses policy operand 1 (mu). Post-fault vl
; is read with csrr and stored to %outvl.
5054 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg3ff.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr , i64)
5055 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i1>, i64, i64)
5057 define <vscale x 2 x half> @test_vlseg3ff_nxv2f16(ptr %base, i64 %vl, ptr %outvl) {
5058 ; CHECK-LABEL: test_vlseg3ff_nxv2f16:
5059 ; CHECK: # %bb.0: # %entry
5060 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
5061 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0)
5062 ; CHECK-NEXT: csrr a0, vl
5063 ; CHECK-NEXT: sd a0, 0(a2)
5066 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg3ff.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, ptr %base, i64 %vl)
5067 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5068 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 3
5069 store i64 %2, ptr %outvl
5070 ret <vscale x 2 x half> %1
5073 define <vscale x 2 x half> @test_vlseg3ff_mask_nxv2f16(<vscale x 2 x half> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
5074 ; CHECK-LABEL: test_vlseg3ff_mask_nxv2f16:
5075 ; CHECK: # %bb.0: # %entry
5076 ; CHECK-NEXT: vmv1r.v v7, v8
5077 ; CHECK-NEXT: vmv1r.v v9, v8
5078 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
5079 ; CHECK-NEXT: vlseg3e16ff.v v7, (a0), v0.t
5080 ; CHECK-NEXT: csrr a0, vl
5081 ; CHECK-NEXT: sd a0, 0(a2)
5084 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg3ff.mask.nxv2f16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
5085 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5086 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 3
5087 store i64 %2, ptr %outvl
5088 ret <vscale x 2 x half> %1
; vlseg4ff nxv2f16: 4-field fault-only-first segment load at e16/mf2; masked
; variant seeds v7,v9,v10 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
5091 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg4ff.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr , i64)
5092 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i1>, i64, i64)
5094 define <vscale x 2 x half> @test_vlseg4ff_nxv2f16(ptr %base, i64 %vl, ptr %outvl) {
5095 ; CHECK-LABEL: test_vlseg4ff_nxv2f16:
5096 ; CHECK: # %bb.0: # %entry
5097 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
5098 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0)
5099 ; CHECK-NEXT: csrr a0, vl
5100 ; CHECK-NEXT: sd a0, 0(a2)
5103 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg4ff.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, ptr %base, i64 %vl)
5104 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5105 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 4
5106 store i64 %2, ptr %outvl
5107 ret <vscale x 2 x half> %1
5110 define <vscale x 2 x half> @test_vlseg4ff_mask_nxv2f16(<vscale x 2 x half> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
5111 ; CHECK-LABEL: test_vlseg4ff_mask_nxv2f16:
5112 ; CHECK: # %bb.0: # %entry
5113 ; CHECK-NEXT: vmv1r.v v7, v8
5114 ; CHECK-NEXT: vmv1r.v v9, v8
5115 ; CHECK-NEXT: vmv1r.v v10, v8
5116 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
5117 ; CHECK-NEXT: vlseg4e16ff.v v7, (a0), v0.t
5118 ; CHECK-NEXT: csrr a0, vl
5119 ; CHECK-NEXT: sd a0, 0(a2)
5122 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg4ff.mask.nxv2f16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
5123 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5124 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 4
5125 store i64 %2, ptr %outvl
5126 ret <vscale x 2 x half> %1
; vlseg5ff nxv2f16: 5-field fault-only-first segment load at e16/mf2; masked
; variant seeds v7,v9-v11 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
5129 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg5ff.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr , i64)
5130 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg5ff.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i1>, i64, i64)
5132 define <vscale x 2 x half> @test_vlseg5ff_nxv2f16(ptr %base, i64 %vl, ptr %outvl) {
5133 ; CHECK-LABEL: test_vlseg5ff_nxv2f16:
5134 ; CHECK: # %bb.0: # %entry
5135 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
5136 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0)
5137 ; CHECK-NEXT: csrr a0, vl
5138 ; CHECK-NEXT: sd a0, 0(a2)
5141 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg5ff.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, ptr %base, i64 %vl)
5142 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5143 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 5
5144 store i64 %2, ptr %outvl
5145 ret <vscale x 2 x half> %1
5148 define <vscale x 2 x half> @test_vlseg5ff_mask_nxv2f16(<vscale x 2 x half> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
5149 ; CHECK-LABEL: test_vlseg5ff_mask_nxv2f16:
5150 ; CHECK: # %bb.0: # %entry
5151 ; CHECK-NEXT: vmv1r.v v7, v8
5152 ; CHECK-NEXT: vmv1r.v v9, v8
5153 ; CHECK-NEXT: vmv1r.v v10, v8
5154 ; CHECK-NEXT: vmv1r.v v11, v8
5155 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
5156 ; CHECK-NEXT: vlseg5e16ff.v v7, (a0), v0.t
5157 ; CHECK-NEXT: csrr a0, vl
5158 ; CHECK-NEXT: sd a0, 0(a2)
5161 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg5ff.mask.nxv2f16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
5162 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5163 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 5
5164 store i64 %2, ptr %outvl
5165 ret <vscale x 2 x half> %1
; vlseg6ff nxv2f16: 6-field fault-only-first segment load at e16/mf2; masked
; variant seeds v7,v9-v12 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
5168 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg6ff.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr , i64)
5169 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg6ff.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i1>, i64, i64)
5171 define <vscale x 2 x half> @test_vlseg6ff_nxv2f16(ptr %base, i64 %vl, ptr %outvl) {
5172 ; CHECK-LABEL: test_vlseg6ff_nxv2f16:
5173 ; CHECK: # %bb.0: # %entry
5174 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
5175 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0)
5176 ; CHECK-NEXT: csrr a0, vl
5177 ; CHECK-NEXT: sd a0, 0(a2)
5180 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg6ff.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, ptr %base, i64 %vl)
5181 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5182 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 6
5183 store i64 %2, ptr %outvl
5184 ret <vscale x 2 x half> %1
5187 define <vscale x 2 x half> @test_vlseg6ff_mask_nxv2f16(<vscale x 2 x half> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
5188 ; CHECK-LABEL: test_vlseg6ff_mask_nxv2f16:
5189 ; CHECK: # %bb.0: # %entry
5190 ; CHECK-NEXT: vmv1r.v v7, v8
5191 ; CHECK-NEXT: vmv1r.v v9, v8
5192 ; CHECK-NEXT: vmv1r.v v10, v8
5193 ; CHECK-NEXT: vmv1r.v v11, v8
5194 ; CHECK-NEXT: vmv1r.v v12, v8
5195 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
5196 ; CHECK-NEXT: vlseg6e16ff.v v7, (a0), v0.t
5197 ; CHECK-NEXT: csrr a0, vl
5198 ; CHECK-NEXT: sd a0, 0(a2)
5201 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg6ff.mask.nxv2f16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
5202 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5203 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 6
5204 store i64 %2, ptr %outvl
5205 ret <vscale x 2 x half> %1
; vlseg7ff nxv2f16: 7-field fault-only-first segment load at e16/mf2; masked
; variant seeds v7,v9-v13 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
5208 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg7ff.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr , i64)
5209 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg7ff.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i1>, i64, i64)
5211 define <vscale x 2 x half> @test_vlseg7ff_nxv2f16(ptr %base, i64 %vl, ptr %outvl) {
5212 ; CHECK-LABEL: test_vlseg7ff_nxv2f16:
5213 ; CHECK: # %bb.0: # %entry
5214 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
5215 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0)
5216 ; CHECK-NEXT: csrr a0, vl
5217 ; CHECK-NEXT: sd a0, 0(a2)
5220 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg7ff.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, ptr %base, i64 %vl)
5221 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5222 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 7
5223 store i64 %2, ptr %outvl
5224 ret <vscale x 2 x half> %1
5227 define <vscale x 2 x half> @test_vlseg7ff_mask_nxv2f16(<vscale x 2 x half> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
5228 ; CHECK-LABEL: test_vlseg7ff_mask_nxv2f16:
5229 ; CHECK: # %bb.0: # %entry
5230 ; CHECK-NEXT: vmv1r.v v7, v8
5231 ; CHECK-NEXT: vmv1r.v v9, v8
5232 ; CHECK-NEXT: vmv1r.v v10, v8
5233 ; CHECK-NEXT: vmv1r.v v11, v8
5234 ; CHECK-NEXT: vmv1r.v v12, v8
5235 ; CHECK-NEXT: vmv1r.v v13, v8
5236 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
5237 ; CHECK-NEXT: vlseg7e16ff.v v7, (a0), v0.t
5238 ; CHECK-NEXT: csrr a0, vl
5239 ; CHECK-NEXT: sd a0, 0(a2)
5242 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg7ff.mask.nxv2f16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
5243 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5244 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 7
5245 store i64 %2, ptr %outvl
5246 ret <vscale x 2 x half> %1
; vlseg8ff nxv2f16: 8-field fault-only-first segment load at e16/mf2; masked
; variant seeds v7,v9-v14 from %val and uses policy operand 1 (mu). Post-fault
; vl is read with csrr and stored to %outvl.
5249 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg8ff.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr , i64)
5250 declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg8ff.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i1>, i64, i64)
5252 define <vscale x 2 x half> @test_vlseg8ff_nxv2f16(ptr %base, i64 %vl, ptr %outvl) {
5253 ; CHECK-LABEL: test_vlseg8ff_nxv2f16:
5254 ; CHECK: # %bb.0: # %entry
5255 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
5256 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0)
5257 ; CHECK-NEXT: csrr a0, vl
5258 ; CHECK-NEXT: sd a0, 0(a2)
5261 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg8ff.nxv2f16(<vscale x 2 x half> undef, <vscale x 2 x half> undef ,<vscale x 2 x half> undef ,<vscale x 2 x half> undef, <vscale x 2 x half> undef ,<vscale x 2 x half> undef, <vscale x 2 x half> undef, <vscale x 2 x half> undef, ptr %base, i64 %vl)
5262 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5263 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 8
5264 store i64 %2, ptr %outvl
5265 ret <vscale x 2 x half> %1
5268 define <vscale x 2 x half> @test_vlseg8ff_mask_nxv2f16(<vscale x 2 x half> %val, ptr %base, i64 %vl, <vscale x 2 x i1> %mask, ptr %outvl) {
5269 ; CHECK-LABEL: test_vlseg8ff_mask_nxv2f16:
5270 ; CHECK: # %bb.0: # %entry
5271 ; CHECK-NEXT: vmv1r.v v7, v8
5272 ; CHECK-NEXT: vmv1r.v v9, v8
5273 ; CHECK-NEXT: vmv1r.v v10, v8
5274 ; CHECK-NEXT: vmv1r.v v11, v8
5275 ; CHECK-NEXT: vmv1r.v v12, v8
5276 ; CHECK-NEXT: vmv1r.v v13, v8
5277 ; CHECK-NEXT: vmv1r.v v14, v8
5278 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, mu
5279 ; CHECK-NEXT: vlseg8e16ff.v v7, (a0), v0.t
5280 ; CHECK-NEXT: csrr a0, vl
5281 ; CHECK-NEXT: sd a0, 0(a2)
5284 %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} @llvm.riscv.vlseg8ff.mask.nxv2f16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i1> %mask, i64 %vl, i64 1)
5285 %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 1
5286 %2 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, i64} %0, 8
5287 store i64 %2, ptr %outvl
5288 ret <vscale x 2 x half> %1
; vlseg2ff nxv4f32: 2-field fault-only-first segment load at e32/m2; masked
; variant seeds v6 from %val (vmv2r, LMUL=2 group) and uses policy operand 1
; (mu). Post-fault vl is read with csrr and stored to %outvl.
5291 declare {<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg2ff.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>, ptr , i64)
5292 declare {<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i1>, i64, i64)
5294 define <vscale x 4 x float> @test_vlseg2ff_nxv4f32(ptr %base, i64 %vl, ptr %outvl) {
5295 ; CHECK-LABEL: test_vlseg2ff_nxv4f32:
5296 ; CHECK: # %bb.0: # %entry
5297 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
5298 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0)
5299 ; CHECK-NEXT: csrr a0, vl
5300 ; CHECK-NEXT: sd a0, 0(a2)
5303 %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg2ff.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, ptr %base, i64 %vl)
5304 %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 1
5305 %2 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 2
5306 store i64 %2, ptr %outvl
5307 ret <vscale x 4 x float> %1
5310 define <vscale x 4 x float> @test_vlseg2ff_mask_nxv4f32(<vscale x 4 x float> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
5311 ; CHECK-LABEL: test_vlseg2ff_mask_nxv4f32:
5312 ; CHECK: # %bb.0: # %entry
5313 ; CHECK-NEXT: vmv2r.v v6, v8
5314 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, mu
5315 ; CHECK-NEXT: vlseg2e32ff.v v6, (a0), v0.t
5316 ; CHECK-NEXT: csrr a0, vl
5317 ; CHECK-NEXT: sd a0, 0(a2)
5320 %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg2ff.mask.nxv4f32(<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
5321 %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 1
5322 %2 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 2
5323 store i64 %2, ptr %outvl
5324 ret <vscale x 4 x float> %1
declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg3ff.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr , i64)
declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg3ff.mask.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i1>, i64, i64)

define <vscale x 4 x float> @test_vlseg3ff_nxv4f32(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vlseg3e32ff.v v6, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg3ff.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 4 x float> %1
}
define <vscale x 4 x float> @test_vlseg3ff_mask_nxv4f32(<vscale x 4 x float> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg3ff_mask_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv2r.v v6, v8
; CHECK-NEXT:    vmv2r.v v10, v8
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT:    vlseg3e32ff.v v6, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg3ff.mask.nxv4f32(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 3
  store i64 %2, ptr %outvl
  ret <vscale x 4 x float> %1
}
declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg4ff.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr , i64)
declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg4ff.mask.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i1>, i64, i64)

define <vscale x 4 x float> @test_vlseg4ff_nxv4f32(ptr %base, i64 %vl, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
; CHECK-NEXT:    vlseg4e32ff.v v6, (a0)
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg4ff.nxv4f32(<vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, <vscale x 4 x float> undef, ptr %base, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 4 x float> %1
}
define <vscale x 4 x float> @test_vlseg4ff_mask_nxv4f32(<vscale x 4 x float> %val, ptr %base, i64 %vl, <vscale x 4 x i1> %mask, ptr %outvl) {
; CHECK-LABEL: test_vlseg4ff_mask_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv2r.v v6, v8
; CHECK-NEXT:    vmv2r.v v10, v8
; CHECK-NEXT:    vmv2r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, mu
; CHECK-NEXT:    vlseg4e32ff.v v6, (a0), v0.t
; CHECK-NEXT:    csrr a0, vl
; CHECK-NEXT:    sd a0, 0(a2)
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} @llvm.riscv.vlseg4ff.mask.nxv4f32(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i1> %mask, i64 %vl, i64 1)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 1
  %2 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, i64} %0, 4
  store i64 %2, ptr %outvl
  ret <vscale x 4 x float> %1
}