; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+zve64d,+f,+d,+zfh,+zvfh \
; RUN:     -verify-machineinstrs < %s | FileCheck %s
5 declare void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, i32)
6 declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i32)
define void @test_vsuxseg2_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei16.v v8, (a0), v16
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei16.v v8, (a0), v16, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl)
  ret void
}
34 declare void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i8(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i8>, i32)
35 declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i8>, <vscale x 16 x i1>, i32)
define void @test_vsuxseg2_nxv16i16_nxv16i8(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv2r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei8.v v8, (a0), v16
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i8(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv16i16_nxv16i8(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv2r.v v16, v12
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei8.v v8, (a0), v16, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl)
  ret void
}
63 declare void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i32(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i32>, i32)
64 declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i32>, <vscale x 16 x i1>, i32)
define void @test_vsuxseg2_nxv16i16_nxv16i32(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei32.v v8, (a0), v16
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i32(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv16i16_nxv16i32(<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv4r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
; CHECK-NEXT:    vsuxseg2ei32.v v8, (a0), v16, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl)
  ret void
}
90 declare void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, i32)
91 declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg2_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v9
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg2ei8.v v8, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v9
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg2ei8.v v8, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
119 declare void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, i32)
120 declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg2_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v9
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg2ei32.v v8, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v9
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg2ei32.v v8, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
148 declare void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, i32)
149 declare void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg2_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v9
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg2ei16.v v8, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v9
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg2ei16.v v8, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
177 declare void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, i32)
178 declare void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg3_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
208 declare void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, i32)
209 declare void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg3_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
239 declare void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, i32)
240 declare void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg3_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
270 declare void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, i32)
271 declare void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg4_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
303 declare void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, i32)
304 declare void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg4_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
336 declare void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, i32)
337 declare void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg4_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
369 declare void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, i32)
370 declare void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg5_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
404 declare void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, i32)
405 declare void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg5_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
439 declare void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, i32)
440 declare void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg5_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
474 declare void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, i32)
475 declare void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg6_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg6_mask_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
511 declare void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, i32)
512 declare void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg6_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg6_mask_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
548 declare void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, i32)
549 declare void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg6_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg6_mask_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i8_nxv1i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
585 declare void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, i32)
586 declare void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg7_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg7_mask_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
624 declare void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, i32)
625 declare void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg7_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv1i8_nxv1i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
; CHECK-NEXT:    vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
645 define void @test_vsuxseg7_mask_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
646 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i32:
647 ; CHECK: # %bb.0: # %entry
648 ; CHECK-NEXT: vmv1r.v v10, v8
649 ; CHECK-NEXT: vmv1r.v v11, v8
650 ; CHECK-NEXT: vmv1r.v v12, v8
651 ; CHECK-NEXT: vmv1r.v v13, v8
652 ; CHECK-NEXT: vmv1r.v v14, v8
653 ; CHECK-NEXT: vmv1r.v v15, v8
654 ; CHECK-NEXT: vmv1r.v v16, v8
655 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
656 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
659 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
663 declare void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, i32)
664 declare void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
666 define void @test_vsuxseg7_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
667 ; CHECK-LABEL: test_vsuxseg7_nxv1i8_nxv1i16:
668 ; CHECK: # %bb.0: # %entry
669 ; CHECK-NEXT: vmv1r.v v10, v8
670 ; CHECK-NEXT: vmv1r.v v11, v8
671 ; CHECK-NEXT: vmv1r.v v12, v8
672 ; CHECK-NEXT: vmv1r.v v13, v8
673 ; CHECK-NEXT: vmv1r.v v14, v8
674 ; CHECK-NEXT: vmv1r.v v15, v8
675 ; CHECK-NEXT: vmv1r.v v16, v8
676 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
677 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
680 tail call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
684 define void @test_vsuxseg7_mask_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
685 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i8_nxv1i16:
686 ; CHECK: # %bb.0: # %entry
687 ; CHECK-NEXT: vmv1r.v v10, v8
688 ; CHECK-NEXT: vmv1r.v v11, v8
689 ; CHECK-NEXT: vmv1r.v v12, v8
690 ; CHECK-NEXT: vmv1r.v v13, v8
691 ; CHECK-NEXT: vmv1r.v v14, v8
692 ; CHECK-NEXT: vmv1r.v v15, v8
693 ; CHECK-NEXT: vmv1r.v v16, v8
694 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
695 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
698 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
702 declare void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, i32)
703 declare void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i8(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
705 define void @test_vsuxseg8_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
706 ; CHECK-LABEL: test_vsuxseg8_nxv1i8_nxv1i8:
707 ; CHECK: # %bb.0: # %entry
708 ; CHECK-NEXT: vmv1r.v v10, v8
709 ; CHECK-NEXT: vmv1r.v v11, v8
710 ; CHECK-NEXT: vmv1r.v v12, v8
711 ; CHECK-NEXT: vmv1r.v v13, v8
712 ; CHECK-NEXT: vmv1r.v v14, v8
713 ; CHECK-NEXT: vmv1r.v v15, v8
714 ; CHECK-NEXT: vmv1r.v v16, v8
715 ; CHECK-NEXT: vmv1r.v v17, v8
716 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
717 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
720 tail call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
724 define void @test_vsuxseg8_mask_nxv1i8_nxv1i8(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
725 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i8:
726 ; CHECK: # %bb.0: # %entry
727 ; CHECK-NEXT: vmv1r.v v10, v8
728 ; CHECK-NEXT: vmv1r.v v11, v8
729 ; CHECK-NEXT: vmv1r.v v12, v8
730 ; CHECK-NEXT: vmv1r.v v13, v8
731 ; CHECK-NEXT: vmv1r.v v14, v8
732 ; CHECK-NEXT: vmv1r.v v15, v8
733 ; CHECK-NEXT: vmv1r.v v16, v8
734 ; CHECK-NEXT: vmv1r.v v17, v8
735 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
736 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
739 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i8(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
743 declare void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, i32)
744 declare void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i32(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
746 define void @test_vsuxseg8_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
747 ; CHECK-LABEL: test_vsuxseg8_nxv1i8_nxv1i32:
748 ; CHECK: # %bb.0: # %entry
749 ; CHECK-NEXT: vmv1r.v v10, v8
750 ; CHECK-NEXT: vmv1r.v v11, v8
751 ; CHECK-NEXT: vmv1r.v v12, v8
752 ; CHECK-NEXT: vmv1r.v v13, v8
753 ; CHECK-NEXT: vmv1r.v v14, v8
754 ; CHECK-NEXT: vmv1r.v v15, v8
755 ; CHECK-NEXT: vmv1r.v v16, v8
756 ; CHECK-NEXT: vmv1r.v v17, v8
757 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
758 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
761 tail call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
765 define void @test_vsuxseg8_mask_nxv1i8_nxv1i32(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
766 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i32:
767 ; CHECK: # %bb.0: # %entry
768 ; CHECK-NEXT: vmv1r.v v10, v8
769 ; CHECK-NEXT: vmv1r.v v11, v8
770 ; CHECK-NEXT: vmv1r.v v12, v8
771 ; CHECK-NEXT: vmv1r.v v13, v8
772 ; CHECK-NEXT: vmv1r.v v14, v8
773 ; CHECK-NEXT: vmv1r.v v15, v8
774 ; CHECK-NEXT: vmv1r.v v16, v8
775 ; CHECK-NEXT: vmv1r.v v17, v8
776 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
777 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
780 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i32(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
784 declare void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, i32)
785 declare void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i16(<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>,<vscale x 1 x i8>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
787 define void @test_vsuxseg8_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
788 ; CHECK-LABEL: test_vsuxseg8_nxv1i8_nxv1i16:
789 ; CHECK: # %bb.0: # %entry
790 ; CHECK-NEXT: vmv1r.v v10, v8
791 ; CHECK-NEXT: vmv1r.v v11, v8
792 ; CHECK-NEXT: vmv1r.v v12, v8
793 ; CHECK-NEXT: vmv1r.v v13, v8
794 ; CHECK-NEXT: vmv1r.v v14, v8
795 ; CHECK-NEXT: vmv1r.v v15, v8
796 ; CHECK-NEXT: vmv1r.v v16, v8
797 ; CHECK-NEXT: vmv1r.v v17, v8
798 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
799 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
802 tail call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
806 define void @test_vsuxseg8_mask_nxv1i8_nxv1i16(<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
807 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i8_nxv1i16:
808 ; CHECK: # %bb.0: # %entry
809 ; CHECK-NEXT: vmv1r.v v10, v8
810 ; CHECK-NEXT: vmv1r.v v11, v8
811 ; CHECK-NEXT: vmv1r.v v12, v8
812 ; CHECK-NEXT: vmv1r.v v13, v8
813 ; CHECK-NEXT: vmv1r.v v14, v8
814 ; CHECK-NEXT: vmv1r.v v15, v8
815 ; CHECK-NEXT: vmv1r.v v16, v8
816 ; CHECK-NEXT: vmv1r.v v17, v8
817 ; CHECK-NEXT: vsetvli zero, a1, e8, mf8, ta, ma
818 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
821 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i16(<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val,<vscale x 1 x i8> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
825 declare void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i16>, i32)
826 declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i32)
828 define void @test_vsuxseg2_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl) {
829 ; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i16:
830 ; CHECK: # %bb.0: # %entry
831 ; CHECK-NEXT: vmv2r.v v10, v8
832 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
833 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
836 tail call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl)
840 define void @test_vsuxseg2_mask_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl) {
841 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i16:
842 ; CHECK: # %bb.0: # %entry
843 ; CHECK-NEXT: vmv2r.v v10, v8
844 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
845 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
848 tail call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl)
852 declare void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i8>, i32)
853 declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i8>, <vscale x 16 x i1>, i32)
855 define void @test_vsuxseg2_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl) {
856 ; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i8:
857 ; CHECK: # %bb.0: # %entry
858 ; CHECK-NEXT: vmv2r.v v12, v10
859 ; CHECK-NEXT: vmv2r.v v10, v8
860 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
861 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12
864 tail call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl)
868 define void @test_vsuxseg2_mask_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl) {
869 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i8:
870 ; CHECK: # %bb.0: # %entry
871 ; CHECK-NEXT: vmv2r.v v12, v10
872 ; CHECK-NEXT: vmv2r.v v10, v8
873 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
874 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12, v0.t
877 tail call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl)
881 declare void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i32>, i32)
882 declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i32>, <vscale x 16 x i1>, i32)
884 define void @test_vsuxseg2_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl) {
885 ; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i32:
886 ; CHECK: # %bb.0: # %entry
887 ; CHECK-NEXT: vmv2r.v v10, v8
888 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
889 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
892 tail call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl)
896 define void @test_vsuxseg2_mask_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl) {
897 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i32:
898 ; CHECK: # %bb.0: # %entry
899 ; CHECK-NEXT: vmv2r.v v10, v8
900 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
901 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
904 tail call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl)
908 declare void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i16>, i32)
909 declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i32)
911 define void @test_vsuxseg3_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl) {
912 ; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i16:
913 ; CHECK: # %bb.0: # %entry
914 ; CHECK-NEXT: vmv2r.v v10, v8
915 ; CHECK-NEXT: vmv4r.v v16, v12
916 ; CHECK-NEXT: vmv2r.v v12, v8
917 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
918 ; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v16
921 tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl)
925 define void @test_vsuxseg3_mask_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl) {
926 ; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i16:
927 ; CHECK: # %bb.0: # %entry
928 ; CHECK-NEXT: vmv2r.v v10, v8
929 ; CHECK-NEXT: vmv4r.v v16, v12
930 ; CHECK-NEXT: vmv2r.v v12, v8
931 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
932 ; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v16, v0.t
935 tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl)
939 declare void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i8>, i32)
940 declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i8>, <vscale x 16 x i1>, i32)
942 define void @test_vsuxseg3_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl) {
943 ; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i8:
944 ; CHECK: # %bb.0: # %entry
945 ; CHECK-NEXT: vmv2r.v v12, v8
946 ; CHECK-NEXT: vmv2r.v v14, v8
947 ; CHECK-NEXT: vmv2r.v v16, v8
948 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
949 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
952 tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl)
956 define void @test_vsuxseg3_mask_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl) {
957 ; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i8:
958 ; CHECK: # %bb.0: # %entry
959 ; CHECK-NEXT: vmv2r.v v12, v8
960 ; CHECK-NEXT: vmv2r.v v14, v8
961 ; CHECK-NEXT: vmv2r.v v16, v8
962 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
963 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
966 tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl)
970 declare void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i32>, i32)
971 declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i32>, <vscale x 16 x i1>, i32)
973 define void @test_vsuxseg3_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl) {
974 ; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i32:
975 ; CHECK: # %bb.0: # %entry
976 ; CHECK-NEXT: vmv2r.v v10, v8
977 ; CHECK-NEXT: vmv2r.v v12, v8
978 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
979 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16
982 tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl)
986 define void @test_vsuxseg3_mask_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl) {
987 ; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i32:
988 ; CHECK: # %bb.0: # %entry
989 ; CHECK-NEXT: vmv2r.v v10, v8
990 ; CHECK-NEXT: vmv2r.v v12, v8
991 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
992 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t
995 tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl)
999 declare void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i16>, i32)
1000 declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i32)
1002 define void @test_vsuxseg4_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl) {
1003 ; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i16:
1004 ; CHECK: # %bb.0: # %entry
1005 ; CHECK-NEXT: vmv2r.v v16, v8
1006 ; CHECK-NEXT: vmv2r.v v18, v8
1007 ; CHECK-NEXT: vmv2r.v v20, v8
1008 ; CHECK-NEXT: vmv2r.v v22, v8
1009 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
1010 ; CHECK-NEXT: vsuxseg4ei16.v v16, (a0), v12
1013 tail call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl)
1017 define void @test_vsuxseg4_mask_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl) {
1018 ; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i16:
1019 ; CHECK: # %bb.0: # %entry
1020 ; CHECK-NEXT: vmv2r.v v16, v8
1021 ; CHECK-NEXT: vmv2r.v v18, v8
1022 ; CHECK-NEXT: vmv2r.v v20, v8
1023 ; CHECK-NEXT: vmv2r.v v22, v8
1024 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
1025 ; CHECK-NEXT: vsuxseg4ei16.v v16, (a0), v12, v0.t
1028 tail call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl)
1032 declare void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i8>, i32)
1033 declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i8>, <vscale x 16 x i1>, i32)
1035 define void @test_vsuxseg4_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl) {
1036 ; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i8:
1037 ; CHECK: # %bb.0: # %entry
1038 ; CHECK-NEXT: vmv2r.v v12, v8
1039 ; CHECK-NEXT: vmv2r.v v14, v8
1040 ; CHECK-NEXT: vmv2r.v v16, v8
1041 ; CHECK-NEXT: vmv2r.v v18, v8
1042 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
1043 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
1046 tail call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl)
1050 define void @test_vsuxseg4_mask_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl) {
1051 ; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i8:
1052 ; CHECK: # %bb.0: # %entry
1053 ; CHECK-NEXT: vmv2r.v v12, v8
1054 ; CHECK-NEXT: vmv2r.v v14, v8
1055 ; CHECK-NEXT: vmv2r.v v16, v8
1056 ; CHECK-NEXT: vmv2r.v v18, v8
1057 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
1058 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
1061 tail call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl)
1065 declare void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i32>, i32)
1066 declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, ptr, <vscale x 16 x i32>, <vscale x 16 x i1>, i32)
1068 define void @test_vsuxseg4_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl) {
1069 ; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i32:
1070 ; CHECK: # %bb.0: # %entry
1071 ; CHECK-NEXT: vmv2r.v v10, v8
1072 ; CHECK-NEXT: vmv2r.v v12, v8
1073 ; CHECK-NEXT: vmv2r.v v14, v8
1074 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
1075 ; CHECK-NEXT: vsuxseg4ei32.v v8, (a0), v16
1078 tail call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl)
1082 define void @test_vsuxseg4_mask_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl) {
1083 ; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i32:
1084 ; CHECK: # %bb.0: # %entry
1085 ; CHECK-NEXT: vmv2r.v v10, v8
1086 ; CHECK-NEXT: vmv2r.v v12, v8
1087 ; CHECK-NEXT: vmv2r.v v14, v8
1088 ; CHECK-NEXT: vsetvli zero, a1, e8, m2, ta, ma
1089 ; CHECK-NEXT: vsuxseg4ei32.v v8, (a0), v16, v0.t
1092 tail call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl)
1096 declare void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, i32)
1097 declare void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
1099 define void @test_vsuxseg2_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
1100 ; CHECK-LABEL: test_vsuxseg2_nxv2i32_nxv2i32:
1101 ; CHECK: # %bb.0: # %entry
1102 ; CHECK-NEXT: vmv1r.v v10, v9
1103 ; CHECK-NEXT: vmv1r.v v9, v8
1104 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1105 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
1108 tail call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
1112 define void @test_vsuxseg2_mask_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
1113 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i32_nxv2i32:
1114 ; CHECK: # %bb.0: # %entry
1115 ; CHECK-NEXT: vmv1r.v v10, v9
1116 ; CHECK-NEXT: vmv1r.v v9, v8
1117 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1118 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
1121 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
1125 declare void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, i32)
1126 declare void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
1128 define void @test_vsuxseg2_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
1129 ; CHECK-LABEL: test_vsuxseg2_nxv2i32_nxv2i8:
1130 ; CHECK: # %bb.0: # %entry
1131 ; CHECK-NEXT: vmv1r.v v10, v9
1132 ; CHECK-NEXT: vmv1r.v v9, v8
1133 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1134 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
1137 tail call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
1141 define void @test_vsuxseg2_mask_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
1142 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i32_nxv2i8:
1143 ; CHECK: # %bb.0: # %entry
1144 ; CHECK-NEXT: vmv1r.v v10, v9
1145 ; CHECK-NEXT: vmv1r.v v9, v8
1146 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1147 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
1150 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
1154 declare void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, i32)
1155 declare void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
1157 define void @test_vsuxseg2_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
1158 ; CHECK-LABEL: test_vsuxseg2_nxv2i32_nxv2i16:
1159 ; CHECK: # %bb.0: # %entry
1160 ; CHECK-NEXT: vmv1r.v v10, v9
1161 ; CHECK-NEXT: vmv1r.v v9, v8
1162 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1163 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
1166 tail call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
1170 define void @test_vsuxseg2_mask_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
1171 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i32_nxv2i16:
1172 ; CHECK: # %bb.0: # %entry
1173 ; CHECK-NEXT: vmv1r.v v10, v9
1174 ; CHECK-NEXT: vmv1r.v v9, v8
1175 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1176 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
1179 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
1183 declare void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, i32)
1184 declare void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
1186 define void @test_vsuxseg3_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
1187 ; CHECK-LABEL: test_vsuxseg3_nxv2i32_nxv2i32:
1188 ; CHECK: # %bb.0: # %entry
1189 ; CHECK-NEXT: vmv1r.v v10, v8
1190 ; CHECK-NEXT: vmv1r.v v11, v8
1191 ; CHECK-NEXT: vmv1r.v v12, v8
1192 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1193 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
1196 tail call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
1200 define void @test_vsuxseg3_mask_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
1201 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i32:
1202 ; CHECK: # %bb.0: # %entry
1203 ; CHECK-NEXT: vmv1r.v v10, v8
1204 ; CHECK-NEXT: vmv1r.v v11, v8
1205 ; CHECK-NEXT: vmv1r.v v12, v8
1206 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1207 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
1210 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
1214 declare void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, i32)
1215 declare void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
1217 define void @test_vsuxseg3_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
1218 ; CHECK-LABEL: test_vsuxseg3_nxv2i32_nxv2i8:
1219 ; CHECK: # %bb.0: # %entry
1220 ; CHECK-NEXT: vmv1r.v v10, v8
1221 ; CHECK-NEXT: vmv1r.v v11, v8
1222 ; CHECK-NEXT: vmv1r.v v12, v8
1223 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1224 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
1227 tail call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
1231 define void @test_vsuxseg3_mask_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
1232 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i8:
1233 ; CHECK: # %bb.0: # %entry
1234 ; CHECK-NEXT: vmv1r.v v10, v8
1235 ; CHECK-NEXT: vmv1r.v v11, v8
1236 ; CHECK-NEXT: vmv1r.v v12, v8
1237 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
1238 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
1241 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
declare void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, i32)
declare void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)

; 3-field segment store, nxv2i32 data / nxv2i16 indices -> vsuxseg3ei16.v.
define void @test_vsuxseg3_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg3_mask_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)

; 4-field segment store, nxv2i32 data / nxv2i32 indices -> vsuxseg4ei32.v.
define void @test_vsuxseg4_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg4_mask_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)

; 4-field segment store, nxv2i32 data / nxv2i8 indices -> vsuxseg4ei8.v.
define void @test_vsuxseg4_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg4_mask_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)

; 4-field segment store, nxv2i32 data / nxv2i16 indices -> vsuxseg4ei16.v.
define void @test_vsuxseg4_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg4_mask_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)

; 5-field segment store, nxv2i32 data / nxv2i32 indices -> vsuxseg5ei32.v.
define void @test_vsuxseg5_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg5_mask_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)

; 5-field segment store, nxv2i32 data / nxv2i8 indices -> vsuxseg5ei8.v.
define void @test_vsuxseg5_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg5_mask_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)

; 5-field segment store, nxv2i32 data / nxv2i16 indices -> vsuxseg5ei16.v.
define void @test_vsuxseg5_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg5_mask_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)

; 6-field segment store, nxv2i32 data / nxv2i32 indices -> vsuxseg6ei32.v.
define void @test_vsuxseg6_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg6_mask_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)

; 6-field segment store, nxv2i32 data / nxv2i8 indices -> vsuxseg6ei8.v.
define void @test_vsuxseg6_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg6_mask_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)

; 6-field segment store, nxv2i32 data / nxv2i16 indices -> vsuxseg6ei16.v.
define void @test_vsuxseg6_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg6_mask_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, i32)
declare void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)

; 7-field segment store, nxv2i32 data / nxv2i32 indices -> vsuxseg7ei32.v.
define void @test_vsuxseg7_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg7_mask_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, i32)
declare void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)

; 7-field segment store, nxv2i32 data / nxv2i8 indices -> vsuxseg7ei8.v.
define void @test_vsuxseg7_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg7_mask_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, i32)
declare void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)

; 7-field segment store, nxv2i32 data / nxv2i16 indices -> vsuxseg7ei16.v.
define void @test_vsuxseg7_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg7_mask_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, i32)
declare void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i32(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)

; 8-field segment store, nxv2i32 data / nxv2i32 indices -> vsuxseg8ei32.v.
define void @test_vsuxseg8_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg8_mask_nxv2i32_nxv2i32(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i32(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, i32)
declare void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i8(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)

; 8-field segment store, nxv2i32 data / nxv2i8 indices -> vsuxseg8ei8.v.
define void @test_vsuxseg8_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg8_mask_nxv2i32_nxv2i8(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i8(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, i32)
declare void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i16(<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>,<vscale x 2 x i32>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)

; 8-field segment store, nxv2i32 data / nxv2i16 indices -> vsuxseg8ei16.v.
define void @test_vsuxseg8_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg8_mask_nxv2i32_nxv2i16(<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv2i32_nxv2i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i16(<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val,<vscale x 2 x i32> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, i32)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)

; 2-field segment store, nxv4i16 data / nxv4i16 indices: the index register is
; moved out of the way (v9 -> v10) so v8/v9 can hold the two segment fields.
define void @test_vsuxseg2_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
  ret void
}

; Masked variant: mask arrives in v0, store carries v0.t.
define void @test_vsuxseg2_mask_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
  ret void
}
1860 declare void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, i32)
1861 declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
1863 define void @test_vsuxseg2_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
1864 ; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i8:
1865 ; CHECK: # %bb.0: # %entry
1866 ; CHECK-NEXT: vmv1r.v v10, v9
1867 ; CHECK-NEXT: vmv1r.v v9, v8
1868 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1869 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
1872 tail call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
1876 define void @test_vsuxseg2_mask_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
1877 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i8:
1878 ; CHECK: # %bb.0: # %entry
1879 ; CHECK-NEXT: vmv1r.v v10, v9
1880 ; CHECK-NEXT: vmv1r.v v9, v8
1881 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1882 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
1885 tail call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
1889 declare void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, i32)
1890 declare void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
1892 define void @test_vsuxseg2_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
1893 ; CHECK-LABEL: test_vsuxseg2_nxv4i16_nxv4i32:
1894 ; CHECK: # %bb.0: # %entry
1895 ; CHECK-NEXT: vmv1r.v v9, v8
1896 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1897 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
1900 tail call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
1904 define void @test_vsuxseg2_mask_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
1905 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i16_nxv4i32:
1906 ; CHECK: # %bb.0: # %entry
1907 ; CHECK-NEXT: vmv1r.v v9, v8
1908 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1909 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
1912 tail call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
1916 declare void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, i32)
1917 declare void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
1919 define void @test_vsuxseg3_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
1920 ; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i16:
1921 ; CHECK: # %bb.0: # %entry
1922 ; CHECK-NEXT: vmv1r.v v10, v8
1923 ; CHECK-NEXT: vmv1r.v v11, v8
1924 ; CHECK-NEXT: vmv1r.v v12, v8
1925 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1926 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
1929 tail call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
1933 define void @test_vsuxseg3_mask_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
1934 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i16:
1935 ; CHECK: # %bb.0: # %entry
1936 ; CHECK-NEXT: vmv1r.v v10, v8
1937 ; CHECK-NEXT: vmv1r.v v11, v8
1938 ; CHECK-NEXT: vmv1r.v v12, v8
1939 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1940 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
1943 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
1947 declare void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, i32)
1948 declare void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
1950 define void @test_vsuxseg3_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
1951 ; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i8:
1952 ; CHECK: # %bb.0: # %entry
1953 ; CHECK-NEXT: vmv1r.v v10, v8
1954 ; CHECK-NEXT: vmv1r.v v11, v8
1955 ; CHECK-NEXT: vmv1r.v v12, v8
1956 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1957 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
1960 tail call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
1964 define void @test_vsuxseg3_mask_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
1965 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i8:
1966 ; CHECK: # %bb.0: # %entry
1967 ; CHECK-NEXT: vmv1r.v v10, v8
1968 ; CHECK-NEXT: vmv1r.v v11, v8
1969 ; CHECK-NEXT: vmv1r.v v12, v8
1970 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1971 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
1974 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
1978 declare void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, i32)
1979 declare void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
1981 define void @test_vsuxseg3_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
1982 ; CHECK-LABEL: test_vsuxseg3_nxv4i16_nxv4i32:
1983 ; CHECK: # %bb.0: # %entry
1984 ; CHECK-NEXT: vmv1r.v v9, v8
1985 ; CHECK-NEXT: vmv2r.v v12, v10
1986 ; CHECK-NEXT: vmv1r.v v10, v8
1987 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
1988 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12
1991 tail call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
1995 define void @test_vsuxseg3_mask_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
1996 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i16_nxv4i32:
1997 ; CHECK: # %bb.0: # %entry
1998 ; CHECK-NEXT: vmv1r.v v9, v8
1999 ; CHECK-NEXT: vmv2r.v v12, v10
2000 ; CHECK-NEXT: vmv1r.v v10, v8
2001 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2002 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t
2005 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
2009 declare void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, i32)
2010 declare void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
2012 define void @test_vsuxseg4_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
2013 ; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i16:
2014 ; CHECK: # %bb.0: # %entry
2015 ; CHECK-NEXT: vmv1r.v v10, v8
2016 ; CHECK-NEXT: vmv1r.v v11, v8
2017 ; CHECK-NEXT: vmv1r.v v12, v8
2018 ; CHECK-NEXT: vmv1r.v v13, v8
2019 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2020 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
2023 tail call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
2027 define void @test_vsuxseg4_mask_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2028 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i16:
2029 ; CHECK: # %bb.0: # %entry
2030 ; CHECK-NEXT: vmv1r.v v10, v8
2031 ; CHECK-NEXT: vmv1r.v v11, v8
2032 ; CHECK-NEXT: vmv1r.v v12, v8
2033 ; CHECK-NEXT: vmv1r.v v13, v8
2034 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2035 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
2038 tail call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
2042 declare void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, i32)
2043 declare void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
2045 define void @test_vsuxseg4_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
2046 ; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i8:
2047 ; CHECK: # %bb.0: # %entry
2048 ; CHECK-NEXT: vmv1r.v v10, v8
2049 ; CHECK-NEXT: vmv1r.v v11, v8
2050 ; CHECK-NEXT: vmv1r.v v12, v8
2051 ; CHECK-NEXT: vmv1r.v v13, v8
2052 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2053 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
2056 tail call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
2060 define void @test_vsuxseg4_mask_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2061 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i8:
2062 ; CHECK: # %bb.0: # %entry
2063 ; CHECK-NEXT: vmv1r.v v10, v8
2064 ; CHECK-NEXT: vmv1r.v v11, v8
2065 ; CHECK-NEXT: vmv1r.v v12, v8
2066 ; CHECK-NEXT: vmv1r.v v13, v8
2067 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2068 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
2071 tail call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
2075 declare void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, i32)
2076 declare void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
2078 define void @test_vsuxseg4_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
2079 ; CHECK-LABEL: test_vsuxseg4_nxv4i16_nxv4i32:
2080 ; CHECK: # %bb.0: # %entry
2081 ; CHECK-NEXT: vmv1r.v v12, v8
2082 ; CHECK-NEXT: vmv1r.v v13, v8
2083 ; CHECK-NEXT: vmv1r.v v14, v8
2084 ; CHECK-NEXT: vmv1r.v v15, v8
2085 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2086 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
2089 tail call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
2093 define void @test_vsuxseg4_mask_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2094 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i16_nxv4i32:
2095 ; CHECK: # %bb.0: # %entry
2096 ; CHECK-NEXT: vmv1r.v v12, v8
2097 ; CHECK-NEXT: vmv1r.v v13, v8
2098 ; CHECK-NEXT: vmv1r.v v14, v8
2099 ; CHECK-NEXT: vmv1r.v v15, v8
2100 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2101 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
2104 tail call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
2108 declare void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, i32)
2109 declare void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
2111 define void @test_vsuxseg5_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
2112 ; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i16:
2113 ; CHECK: # %bb.0: # %entry
2114 ; CHECK-NEXT: vmv1r.v v10, v8
2115 ; CHECK-NEXT: vmv1r.v v11, v8
2116 ; CHECK-NEXT: vmv1r.v v12, v8
2117 ; CHECK-NEXT: vmv1r.v v13, v8
2118 ; CHECK-NEXT: vmv1r.v v14, v8
2119 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2120 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
2123 tail call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
2127 define void @test_vsuxseg5_mask_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2128 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i16:
2129 ; CHECK: # %bb.0: # %entry
2130 ; CHECK-NEXT: vmv1r.v v10, v8
2131 ; CHECK-NEXT: vmv1r.v v11, v8
2132 ; CHECK-NEXT: vmv1r.v v12, v8
2133 ; CHECK-NEXT: vmv1r.v v13, v8
2134 ; CHECK-NEXT: vmv1r.v v14, v8
2135 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2136 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
2139 tail call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
2143 declare void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, i32)
2144 declare void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
2146 define void @test_vsuxseg5_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
2147 ; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i8:
2148 ; CHECK: # %bb.0: # %entry
2149 ; CHECK-NEXT: vmv1r.v v10, v8
2150 ; CHECK-NEXT: vmv1r.v v11, v8
2151 ; CHECK-NEXT: vmv1r.v v12, v8
2152 ; CHECK-NEXT: vmv1r.v v13, v8
2153 ; CHECK-NEXT: vmv1r.v v14, v8
2154 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2155 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
2158 tail call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
2162 define void @test_vsuxseg5_mask_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2163 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i8:
2164 ; CHECK: # %bb.0: # %entry
2165 ; CHECK-NEXT: vmv1r.v v10, v8
2166 ; CHECK-NEXT: vmv1r.v v11, v8
2167 ; CHECK-NEXT: vmv1r.v v12, v8
2168 ; CHECK-NEXT: vmv1r.v v13, v8
2169 ; CHECK-NEXT: vmv1r.v v14, v8
2170 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2171 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
2174 tail call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
2178 declare void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, i32)
2179 declare void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
2181 define void @test_vsuxseg5_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
2182 ; CHECK-LABEL: test_vsuxseg5_nxv4i16_nxv4i32:
2183 ; CHECK: # %bb.0: # %entry
2184 ; CHECK-NEXT: vmv1r.v v12, v8
2185 ; CHECK-NEXT: vmv1r.v v13, v8
2186 ; CHECK-NEXT: vmv1r.v v14, v8
2187 ; CHECK-NEXT: vmv1r.v v15, v8
2188 ; CHECK-NEXT: vmv1r.v v16, v8
2189 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2190 ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10
2193 tail call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
2197 define void @test_vsuxseg5_mask_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2198 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4i16_nxv4i32:
2199 ; CHECK: # %bb.0: # %entry
2200 ; CHECK-NEXT: vmv1r.v v12, v8
2201 ; CHECK-NEXT: vmv1r.v v13, v8
2202 ; CHECK-NEXT: vmv1r.v v14, v8
2203 ; CHECK-NEXT: vmv1r.v v15, v8
2204 ; CHECK-NEXT: vmv1r.v v16, v8
2205 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2206 ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t
2209 tail call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
2213 declare void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, i32)
2214 declare void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
2216 define void @test_vsuxseg6_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
2217 ; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i16:
2218 ; CHECK: # %bb.0: # %entry
2219 ; CHECK-NEXT: vmv1r.v v10, v8
2220 ; CHECK-NEXT: vmv1r.v v11, v8
2221 ; CHECK-NEXT: vmv1r.v v12, v8
2222 ; CHECK-NEXT: vmv1r.v v13, v8
2223 ; CHECK-NEXT: vmv1r.v v14, v8
2224 ; CHECK-NEXT: vmv1r.v v15, v8
2225 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2226 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
2229 tail call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
2233 define void @test_vsuxseg6_mask_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2234 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i16:
2235 ; CHECK: # %bb.0: # %entry
2236 ; CHECK-NEXT: vmv1r.v v10, v8
2237 ; CHECK-NEXT: vmv1r.v v11, v8
2238 ; CHECK-NEXT: vmv1r.v v12, v8
2239 ; CHECK-NEXT: vmv1r.v v13, v8
2240 ; CHECK-NEXT: vmv1r.v v14, v8
2241 ; CHECK-NEXT: vmv1r.v v15, v8
2242 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2243 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
2246 tail call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
2250 declare void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, i32)
2251 declare void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
2253 define void @test_vsuxseg6_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
2254 ; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i8:
2255 ; CHECK: # %bb.0: # %entry
2256 ; CHECK-NEXT: vmv1r.v v10, v8
2257 ; CHECK-NEXT: vmv1r.v v11, v8
2258 ; CHECK-NEXT: vmv1r.v v12, v8
2259 ; CHECK-NEXT: vmv1r.v v13, v8
2260 ; CHECK-NEXT: vmv1r.v v14, v8
2261 ; CHECK-NEXT: vmv1r.v v15, v8
2262 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2263 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
2266 tail call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
2270 define void @test_vsuxseg6_mask_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2271 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i8:
2272 ; CHECK: # %bb.0: # %entry
2273 ; CHECK-NEXT: vmv1r.v v10, v8
2274 ; CHECK-NEXT: vmv1r.v v11, v8
2275 ; CHECK-NEXT: vmv1r.v v12, v8
2276 ; CHECK-NEXT: vmv1r.v v13, v8
2277 ; CHECK-NEXT: vmv1r.v v14, v8
2278 ; CHECK-NEXT: vmv1r.v v15, v8
2279 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2280 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
2283 tail call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
2287 declare void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, i32)
2288 declare void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
2290 define void @test_vsuxseg6_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
2291 ; CHECK-LABEL: test_vsuxseg6_nxv4i16_nxv4i32:
2292 ; CHECK: # %bb.0: # %entry
2293 ; CHECK-NEXT: vmv1r.v v12, v8
2294 ; CHECK-NEXT: vmv1r.v v13, v8
2295 ; CHECK-NEXT: vmv1r.v v14, v8
2296 ; CHECK-NEXT: vmv1r.v v15, v8
2297 ; CHECK-NEXT: vmv1r.v v16, v8
2298 ; CHECK-NEXT: vmv1r.v v17, v8
2299 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2300 ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10
2303 tail call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
2307 define void @test_vsuxseg6_mask_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2308 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4i16_nxv4i32:
2309 ; CHECK: # %bb.0: # %entry
2310 ; CHECK-NEXT: vmv1r.v v12, v8
2311 ; CHECK-NEXT: vmv1r.v v13, v8
2312 ; CHECK-NEXT: vmv1r.v v14, v8
2313 ; CHECK-NEXT: vmv1r.v v15, v8
2314 ; CHECK-NEXT: vmv1r.v v16, v8
2315 ; CHECK-NEXT: vmv1r.v v17, v8
2316 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2317 ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t
2320 tail call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
2324 declare void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, i32)
2325 declare void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
2327 define void @test_vsuxseg7_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
2328 ; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i16:
2329 ; CHECK: # %bb.0: # %entry
2330 ; CHECK-NEXT: vmv1r.v v10, v8
2331 ; CHECK-NEXT: vmv1r.v v11, v8
2332 ; CHECK-NEXT: vmv1r.v v12, v8
2333 ; CHECK-NEXT: vmv1r.v v13, v8
2334 ; CHECK-NEXT: vmv1r.v v14, v8
2335 ; CHECK-NEXT: vmv1r.v v15, v8
2336 ; CHECK-NEXT: vmv1r.v v16, v8
2337 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2338 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
2341 tail call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
2345 define void @test_vsuxseg7_mask_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2346 ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i16:
2347 ; CHECK: # %bb.0: # %entry
2348 ; CHECK-NEXT: vmv1r.v v10, v8
2349 ; CHECK-NEXT: vmv1r.v v11, v8
2350 ; CHECK-NEXT: vmv1r.v v12, v8
2351 ; CHECK-NEXT: vmv1r.v v13, v8
2352 ; CHECK-NEXT: vmv1r.v v14, v8
2353 ; CHECK-NEXT: vmv1r.v v15, v8
2354 ; CHECK-NEXT: vmv1r.v v16, v8
2355 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2356 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
2359 tail call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
2363 declare void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, i32)
2364 declare void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
2366 define void @test_vsuxseg7_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
2367 ; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i8:
2368 ; CHECK: # %bb.0: # %entry
2369 ; CHECK-NEXT: vmv1r.v v10, v8
2370 ; CHECK-NEXT: vmv1r.v v11, v8
2371 ; CHECK-NEXT: vmv1r.v v12, v8
2372 ; CHECK-NEXT: vmv1r.v v13, v8
2373 ; CHECK-NEXT: vmv1r.v v14, v8
2374 ; CHECK-NEXT: vmv1r.v v15, v8
2375 ; CHECK-NEXT: vmv1r.v v16, v8
2376 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2377 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
2380 tail call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
2384 define void @test_vsuxseg7_mask_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2385 ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i8:
2386 ; CHECK: # %bb.0: # %entry
2387 ; CHECK-NEXT: vmv1r.v v10, v8
2388 ; CHECK-NEXT: vmv1r.v v11, v8
2389 ; CHECK-NEXT: vmv1r.v v12, v8
2390 ; CHECK-NEXT: vmv1r.v v13, v8
2391 ; CHECK-NEXT: vmv1r.v v14, v8
2392 ; CHECK-NEXT: vmv1r.v v15, v8
2393 ; CHECK-NEXT: vmv1r.v v16, v8
2394 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2395 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
2398 tail call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
2402 declare void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, i32)
2403 declare void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
2405 define void @test_vsuxseg7_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
2406 ; CHECK-LABEL: test_vsuxseg7_nxv4i16_nxv4i32:
2407 ; CHECK: # %bb.0: # %entry
2408 ; CHECK-NEXT: vmv1r.v v12, v8
2409 ; CHECK-NEXT: vmv1r.v v13, v8
2410 ; CHECK-NEXT: vmv1r.v v14, v8
2411 ; CHECK-NEXT: vmv1r.v v15, v8
2412 ; CHECK-NEXT: vmv1r.v v16, v8
2413 ; CHECK-NEXT: vmv1r.v v17, v8
2414 ; CHECK-NEXT: vmv1r.v v18, v8
2415 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2416 ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10
2419 tail call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
2423 define void @test_vsuxseg7_mask_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2424 ; CHECK-LABEL: test_vsuxseg7_mask_nxv4i16_nxv4i32:
2425 ; CHECK: # %bb.0: # %entry
2426 ; CHECK-NEXT: vmv1r.v v12, v8
2427 ; CHECK-NEXT: vmv1r.v v13, v8
2428 ; CHECK-NEXT: vmv1r.v v14, v8
2429 ; CHECK-NEXT: vmv1r.v v15, v8
2430 ; CHECK-NEXT: vmv1r.v v16, v8
2431 ; CHECK-NEXT: vmv1r.v v17, v8
2432 ; CHECK-NEXT: vmv1r.v v18, v8
2433 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2434 ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t
2437 tail call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
2441 declare void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, i32)
2442 declare void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i16(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
2444 define void @test_vsuxseg8_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
2445 ; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i16:
2446 ; CHECK: # %bb.0: # %entry
2447 ; CHECK-NEXT: vmv1r.v v10, v8
2448 ; CHECK-NEXT: vmv1r.v v11, v8
2449 ; CHECK-NEXT: vmv1r.v v12, v8
2450 ; CHECK-NEXT: vmv1r.v v13, v8
2451 ; CHECK-NEXT: vmv1r.v v14, v8
2452 ; CHECK-NEXT: vmv1r.v v15, v8
2453 ; CHECK-NEXT: vmv1r.v v16, v8
2454 ; CHECK-NEXT: vmv1r.v v17, v8
2455 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2456 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
2459 tail call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
2463 define void @test_vsuxseg8_mask_nxv4i16_nxv4i16(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2464 ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i16:
2465 ; CHECK: # %bb.0: # %entry
2466 ; CHECK-NEXT: vmv1r.v v10, v8
2467 ; CHECK-NEXT: vmv1r.v v11, v8
2468 ; CHECK-NEXT: vmv1r.v v12, v8
2469 ; CHECK-NEXT: vmv1r.v v13, v8
2470 ; CHECK-NEXT: vmv1r.v v14, v8
2471 ; CHECK-NEXT: vmv1r.v v15, v8
2472 ; CHECK-NEXT: vmv1r.v v16, v8
2473 ; CHECK-NEXT: vmv1r.v v17, v8
2474 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2475 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
2478 tail call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i16(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
2482 declare void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, i32)
2483 declare void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i8(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
2485 define void @test_vsuxseg8_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
2486 ; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i8:
2487 ; CHECK: # %bb.0: # %entry
2488 ; CHECK-NEXT: vmv1r.v v10, v8
2489 ; CHECK-NEXT: vmv1r.v v11, v8
2490 ; CHECK-NEXT: vmv1r.v v12, v8
2491 ; CHECK-NEXT: vmv1r.v v13, v8
2492 ; CHECK-NEXT: vmv1r.v v14, v8
2493 ; CHECK-NEXT: vmv1r.v v15, v8
2494 ; CHECK-NEXT: vmv1r.v v16, v8
2495 ; CHECK-NEXT: vmv1r.v v17, v8
2496 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2497 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
2500 tail call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
2504 define void @test_vsuxseg8_mask_nxv4i16_nxv4i8(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2505 ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i8:
2506 ; CHECK: # %bb.0: # %entry
2507 ; CHECK-NEXT: vmv1r.v v10, v8
2508 ; CHECK-NEXT: vmv1r.v v11, v8
2509 ; CHECK-NEXT: vmv1r.v v12, v8
2510 ; CHECK-NEXT: vmv1r.v v13, v8
2511 ; CHECK-NEXT: vmv1r.v v14, v8
2512 ; CHECK-NEXT: vmv1r.v v15, v8
2513 ; CHECK-NEXT: vmv1r.v v16, v8
2514 ; CHECK-NEXT: vmv1r.v v17, v8
2515 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2516 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
2519 tail call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i8(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
2523 declare void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, i32)
2524 declare void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i32(<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>,<vscale x 4 x i16>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
2526 define void @test_vsuxseg8_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
2527 ; CHECK-LABEL: test_vsuxseg8_nxv4i16_nxv4i32:
2528 ; CHECK: # %bb.0: # %entry
2529 ; CHECK-NEXT: vmv1r.v v12, v8
2530 ; CHECK-NEXT: vmv1r.v v13, v8
2531 ; CHECK-NEXT: vmv1r.v v14, v8
2532 ; CHECK-NEXT: vmv1r.v v15, v8
2533 ; CHECK-NEXT: vmv1r.v v16, v8
2534 ; CHECK-NEXT: vmv1r.v v17, v8
2535 ; CHECK-NEXT: vmv1r.v v18, v8
2536 ; CHECK-NEXT: vmv1r.v v19, v8
2537 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2538 ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10
2541 tail call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
2545 define void @test_vsuxseg8_mask_nxv4i16_nxv4i32(<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
2546 ; CHECK-LABEL: test_vsuxseg8_mask_nxv4i16_nxv4i32:
2547 ; CHECK: # %bb.0: # %entry
2548 ; CHECK-NEXT: vmv1r.v v12, v8
2549 ; CHECK-NEXT: vmv1r.v v13, v8
2550 ; CHECK-NEXT: vmv1r.v v14, v8
2551 ; CHECK-NEXT: vmv1r.v v15, v8
2552 ; CHECK-NEXT: vmv1r.v v16, v8
2553 ; CHECK-NEXT: vmv1r.v v17, v8
2554 ; CHECK-NEXT: vmv1r.v v18, v8
2555 ; CHECK-NEXT: vmv1r.v v19, v8
2556 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
2557 ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t
2560 tail call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i32(<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val,<vscale x 4 x i16> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
2564 declare void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, i32)
2565 declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
2567 define void @test_vsuxseg2_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
2568 ; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i8:
2569 ; CHECK: # %bb.0: # %entry
2570 ; CHECK-NEXT: vmv1r.v v10, v9
2571 ; CHECK-NEXT: vmv1r.v v9, v8
2572 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2573 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
2576 tail call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
2580 define void @test_vsuxseg2_mask_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2581 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i8:
2582 ; CHECK: # %bb.0: # %entry
2583 ; CHECK-NEXT: vmv1r.v v10, v9
2584 ; CHECK-NEXT: vmv1r.v v9, v8
2585 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2586 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
2589 tail call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
2593 declare void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, i32)
2594 declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
2596 define void @test_vsuxseg2_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
2597 ; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i32:
2598 ; CHECK: # %bb.0: # %entry
2599 ; CHECK-NEXT: vmv1r.v v10, v9
2600 ; CHECK-NEXT: vmv1r.v v9, v8
2601 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2602 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
2605 tail call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
2609 define void @test_vsuxseg2_mask_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2610 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i32:
2611 ; CHECK: # %bb.0: # %entry
2612 ; CHECK-NEXT: vmv1r.v v10, v9
2613 ; CHECK-NEXT: vmv1r.v v9, v8
2614 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2615 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
2618 tail call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
2622 declare void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, i32)
2623 declare void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
2625 define void @test_vsuxseg2_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
2626 ; CHECK-LABEL: test_vsuxseg2_nxv1i32_nxv1i16:
2627 ; CHECK: # %bb.0: # %entry
2628 ; CHECK-NEXT: vmv1r.v v10, v9
2629 ; CHECK-NEXT: vmv1r.v v9, v8
2630 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2631 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
2634 tail call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
2638 define void @test_vsuxseg2_mask_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2639 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1i32_nxv1i16:
2640 ; CHECK: # %bb.0: # %entry
2641 ; CHECK-NEXT: vmv1r.v v10, v9
2642 ; CHECK-NEXT: vmv1r.v v9, v8
2643 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2644 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
2647 tail call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
2651 declare void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, i32)
2652 declare void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
2654 define void @test_vsuxseg3_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
2655 ; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i8:
2656 ; CHECK: # %bb.0: # %entry
2657 ; CHECK-NEXT: vmv1r.v v10, v8
2658 ; CHECK-NEXT: vmv1r.v v11, v8
2659 ; CHECK-NEXT: vmv1r.v v12, v8
2660 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2661 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
2664 tail call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
2668 define void @test_vsuxseg3_mask_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2669 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i8:
2670 ; CHECK: # %bb.0: # %entry
2671 ; CHECK-NEXT: vmv1r.v v10, v8
2672 ; CHECK-NEXT: vmv1r.v v11, v8
2673 ; CHECK-NEXT: vmv1r.v v12, v8
2674 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2675 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
2678 tail call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
2682 declare void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, i32)
2683 declare void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
2685 define void @test_vsuxseg3_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
2686 ; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i32:
2687 ; CHECK: # %bb.0: # %entry
2688 ; CHECK-NEXT: vmv1r.v v10, v8
2689 ; CHECK-NEXT: vmv1r.v v11, v8
2690 ; CHECK-NEXT: vmv1r.v v12, v8
2691 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2692 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
2695 tail call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
2699 define void @test_vsuxseg3_mask_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2700 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i32:
2701 ; CHECK: # %bb.0: # %entry
2702 ; CHECK-NEXT: vmv1r.v v10, v8
2703 ; CHECK-NEXT: vmv1r.v v11, v8
2704 ; CHECK-NEXT: vmv1r.v v12, v8
2705 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2706 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
2709 tail call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
2713 declare void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, i32)
2714 declare void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
2716 define void @test_vsuxseg3_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
2717 ; CHECK-LABEL: test_vsuxseg3_nxv1i32_nxv1i16:
2718 ; CHECK: # %bb.0: # %entry
2719 ; CHECK-NEXT: vmv1r.v v10, v8
2720 ; CHECK-NEXT: vmv1r.v v11, v8
2721 ; CHECK-NEXT: vmv1r.v v12, v8
2722 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2723 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
2726 tail call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
2730 define void @test_vsuxseg3_mask_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2731 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1i32_nxv1i16:
2732 ; CHECK: # %bb.0: # %entry
2733 ; CHECK-NEXT: vmv1r.v v10, v8
2734 ; CHECK-NEXT: vmv1r.v v11, v8
2735 ; CHECK-NEXT: vmv1r.v v12, v8
2736 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2737 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
2740 tail call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
2744 declare void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, i32)
2745 declare void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
2747 define void @test_vsuxseg4_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
2748 ; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i8:
2749 ; CHECK: # %bb.0: # %entry
2750 ; CHECK-NEXT: vmv1r.v v10, v8
2751 ; CHECK-NEXT: vmv1r.v v11, v8
2752 ; CHECK-NEXT: vmv1r.v v12, v8
2753 ; CHECK-NEXT: vmv1r.v v13, v8
2754 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2755 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
2758 tail call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
2762 define void @test_vsuxseg4_mask_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2763 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i8:
2764 ; CHECK: # %bb.0: # %entry
2765 ; CHECK-NEXT: vmv1r.v v10, v8
2766 ; CHECK-NEXT: vmv1r.v v11, v8
2767 ; CHECK-NEXT: vmv1r.v v12, v8
2768 ; CHECK-NEXT: vmv1r.v v13, v8
2769 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2770 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
2773 tail call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
2777 declare void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, i32)
2778 declare void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
2780 define void @test_vsuxseg4_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
2781 ; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i32:
2782 ; CHECK: # %bb.0: # %entry
2783 ; CHECK-NEXT: vmv1r.v v10, v8
2784 ; CHECK-NEXT: vmv1r.v v11, v8
2785 ; CHECK-NEXT: vmv1r.v v12, v8
2786 ; CHECK-NEXT: vmv1r.v v13, v8
2787 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2788 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
2791 tail call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
2795 define void @test_vsuxseg4_mask_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2796 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i32:
2797 ; CHECK: # %bb.0: # %entry
2798 ; CHECK-NEXT: vmv1r.v v10, v8
2799 ; CHECK-NEXT: vmv1r.v v11, v8
2800 ; CHECK-NEXT: vmv1r.v v12, v8
2801 ; CHECK-NEXT: vmv1r.v v13, v8
2802 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2803 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
2806 tail call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
2810 declare void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, i32)
2811 declare void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
2813 define void @test_vsuxseg4_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
2814 ; CHECK-LABEL: test_vsuxseg4_nxv1i32_nxv1i16:
2815 ; CHECK: # %bb.0: # %entry
2816 ; CHECK-NEXT: vmv1r.v v10, v8
2817 ; CHECK-NEXT: vmv1r.v v11, v8
2818 ; CHECK-NEXT: vmv1r.v v12, v8
2819 ; CHECK-NEXT: vmv1r.v v13, v8
2820 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2821 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
2824 tail call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
2828 define void @test_vsuxseg4_mask_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2829 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1i32_nxv1i16:
2830 ; CHECK: # %bb.0: # %entry
2831 ; CHECK-NEXT: vmv1r.v v10, v8
2832 ; CHECK-NEXT: vmv1r.v v11, v8
2833 ; CHECK-NEXT: vmv1r.v v12, v8
2834 ; CHECK-NEXT: vmv1r.v v13, v8
2835 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2836 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
2839 tail call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
2843 declare void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, i32)
2844 declare void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
2846 define void @test_vsuxseg5_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
2847 ; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i8:
2848 ; CHECK: # %bb.0: # %entry
2849 ; CHECK-NEXT: vmv1r.v v10, v8
2850 ; CHECK-NEXT: vmv1r.v v11, v8
2851 ; CHECK-NEXT: vmv1r.v v12, v8
2852 ; CHECK-NEXT: vmv1r.v v13, v8
2853 ; CHECK-NEXT: vmv1r.v v14, v8
2854 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2855 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
2858 tail call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
2862 define void @test_vsuxseg5_mask_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2863 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i8:
2864 ; CHECK: # %bb.0: # %entry
2865 ; CHECK-NEXT: vmv1r.v v10, v8
2866 ; CHECK-NEXT: vmv1r.v v11, v8
2867 ; CHECK-NEXT: vmv1r.v v12, v8
2868 ; CHECK-NEXT: vmv1r.v v13, v8
2869 ; CHECK-NEXT: vmv1r.v v14, v8
2870 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2871 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
2874 tail call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
2878 declare void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, i32)
2879 declare void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
2881 define void @test_vsuxseg5_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
2882 ; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i32:
2883 ; CHECK: # %bb.0: # %entry
2884 ; CHECK-NEXT: vmv1r.v v10, v8
2885 ; CHECK-NEXT: vmv1r.v v11, v8
2886 ; CHECK-NEXT: vmv1r.v v12, v8
2887 ; CHECK-NEXT: vmv1r.v v13, v8
2888 ; CHECK-NEXT: vmv1r.v v14, v8
2889 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2890 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
2893 tail call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
2897 define void @test_vsuxseg5_mask_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2898 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i32:
2899 ; CHECK: # %bb.0: # %entry
2900 ; CHECK-NEXT: vmv1r.v v10, v8
2901 ; CHECK-NEXT: vmv1r.v v11, v8
2902 ; CHECK-NEXT: vmv1r.v v12, v8
2903 ; CHECK-NEXT: vmv1r.v v13, v8
2904 ; CHECK-NEXT: vmv1r.v v14, v8
2905 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2906 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
2909 tail call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
2913 declare void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, i32)
2914 declare void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
2916 define void @test_vsuxseg5_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
2917 ; CHECK-LABEL: test_vsuxseg5_nxv1i32_nxv1i16:
2918 ; CHECK: # %bb.0: # %entry
2919 ; CHECK-NEXT: vmv1r.v v10, v8
2920 ; CHECK-NEXT: vmv1r.v v11, v8
2921 ; CHECK-NEXT: vmv1r.v v12, v8
2922 ; CHECK-NEXT: vmv1r.v v13, v8
2923 ; CHECK-NEXT: vmv1r.v v14, v8
2924 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2925 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
2928 tail call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
2932 define void @test_vsuxseg5_mask_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2933 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1i32_nxv1i16:
2934 ; CHECK: # %bb.0: # %entry
2935 ; CHECK-NEXT: vmv1r.v v10, v8
2936 ; CHECK-NEXT: vmv1r.v v11, v8
2937 ; CHECK-NEXT: vmv1r.v v12, v8
2938 ; CHECK-NEXT: vmv1r.v v13, v8
2939 ; CHECK-NEXT: vmv1r.v v14, v8
2940 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2941 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
2944 tail call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
2948 declare void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, i32)
2949 declare void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
2951 define void @test_vsuxseg6_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
2952 ; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i8:
2953 ; CHECK: # %bb.0: # %entry
2954 ; CHECK-NEXT: vmv1r.v v10, v8
2955 ; CHECK-NEXT: vmv1r.v v11, v8
2956 ; CHECK-NEXT: vmv1r.v v12, v8
2957 ; CHECK-NEXT: vmv1r.v v13, v8
2958 ; CHECK-NEXT: vmv1r.v v14, v8
2959 ; CHECK-NEXT: vmv1r.v v15, v8
2960 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2961 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
2964 tail call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
2968 define void @test_vsuxseg6_mask_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
2969 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i8:
2970 ; CHECK: # %bb.0: # %entry
2971 ; CHECK-NEXT: vmv1r.v v10, v8
2972 ; CHECK-NEXT: vmv1r.v v11, v8
2973 ; CHECK-NEXT: vmv1r.v v12, v8
2974 ; CHECK-NEXT: vmv1r.v v13, v8
2975 ; CHECK-NEXT: vmv1r.v v14, v8
2976 ; CHECK-NEXT: vmv1r.v v15, v8
2977 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2978 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
2981 tail call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
2985 declare void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, i32)
2986 declare void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
2988 define void @test_vsuxseg6_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
2989 ; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i32:
2990 ; CHECK: # %bb.0: # %entry
2991 ; CHECK-NEXT: vmv1r.v v10, v8
2992 ; CHECK-NEXT: vmv1r.v v11, v8
2993 ; CHECK-NEXT: vmv1r.v v12, v8
2994 ; CHECK-NEXT: vmv1r.v v13, v8
2995 ; CHECK-NEXT: vmv1r.v v14, v8
2996 ; CHECK-NEXT: vmv1r.v v15, v8
2997 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
2998 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
3001 tail call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
3005 define void @test_vsuxseg6_mask_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3006 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i32:
3007 ; CHECK: # %bb.0: # %entry
3008 ; CHECK-NEXT: vmv1r.v v10, v8
3009 ; CHECK-NEXT: vmv1r.v v11, v8
3010 ; CHECK-NEXT: vmv1r.v v12, v8
3011 ; CHECK-NEXT: vmv1r.v v13, v8
3012 ; CHECK-NEXT: vmv1r.v v14, v8
3013 ; CHECK-NEXT: vmv1r.v v15, v8
3014 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3015 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
3018 tail call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
3022 declare void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, i32)
3023 declare void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
3025 define void @test_vsuxseg6_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
3026 ; CHECK-LABEL: test_vsuxseg6_nxv1i32_nxv1i16:
3027 ; CHECK: # %bb.0: # %entry
3028 ; CHECK-NEXT: vmv1r.v v10, v8
3029 ; CHECK-NEXT: vmv1r.v v11, v8
3030 ; CHECK-NEXT: vmv1r.v v12, v8
3031 ; CHECK-NEXT: vmv1r.v v13, v8
3032 ; CHECK-NEXT: vmv1r.v v14, v8
3033 ; CHECK-NEXT: vmv1r.v v15, v8
3034 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3035 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
3038 tail call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
3042 define void @test_vsuxseg6_mask_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3043 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i32_nxv1i16:
3044 ; CHECK: # %bb.0: # %entry
3045 ; CHECK-NEXT: vmv1r.v v10, v8
3046 ; CHECK-NEXT: vmv1r.v v11, v8
3047 ; CHECK-NEXT: vmv1r.v v12, v8
3048 ; CHECK-NEXT: vmv1r.v v13, v8
3049 ; CHECK-NEXT: vmv1r.v v14, v8
3050 ; CHECK-NEXT: vmv1r.v v15, v8
3051 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3052 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
3055 tail call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
3059 declare void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, i32)
3060 declare void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
3062 define void @test_vsuxseg7_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
3063 ; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i8:
3064 ; CHECK: # %bb.0: # %entry
3065 ; CHECK-NEXT: vmv1r.v v10, v8
3066 ; CHECK-NEXT: vmv1r.v v11, v8
3067 ; CHECK-NEXT: vmv1r.v v12, v8
3068 ; CHECK-NEXT: vmv1r.v v13, v8
3069 ; CHECK-NEXT: vmv1r.v v14, v8
3070 ; CHECK-NEXT: vmv1r.v v15, v8
3071 ; CHECK-NEXT: vmv1r.v v16, v8
3072 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3073 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
3076 tail call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
3080 define void @test_vsuxseg7_mask_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3081 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i8:
3082 ; CHECK: # %bb.0: # %entry
3083 ; CHECK-NEXT: vmv1r.v v10, v8
3084 ; CHECK-NEXT: vmv1r.v v11, v8
3085 ; CHECK-NEXT: vmv1r.v v12, v8
3086 ; CHECK-NEXT: vmv1r.v v13, v8
3087 ; CHECK-NEXT: vmv1r.v v14, v8
3088 ; CHECK-NEXT: vmv1r.v v15, v8
3089 ; CHECK-NEXT: vmv1r.v v16, v8
3090 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3091 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
3094 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
; vsuxseg7 declarations (unmasked/masked): seven <vscale x 1 x i32> values,
; base ptr, <vscale x 1 x i32> index vector, optional mask, i32 VL.
3098 declare void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, i32)
3099 declare void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
3101 define void @test_vsuxseg7_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
3102 ; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i32:
3103 ; CHECK: # %bb.0: # %entry
3104 ; CHECK-NEXT: vmv1r.v v10, v8
3105 ; CHECK-NEXT: vmv1r.v v11, v8
3106 ; CHECK-NEXT: vmv1r.v v12, v8
3107 ; CHECK-NEXT: vmv1r.v v13, v8
3108 ; CHECK-NEXT: vmv1r.v v14, v8
3109 ; CHECK-NEXT: vmv1r.v v15, v8
3110 ; CHECK-NEXT: vmv1r.v v16, v8
3111 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3112 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
3115 tail call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
3119 define void @test_vsuxseg7_mask_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3120 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i32:
3121 ; CHECK: # %bb.0: # %entry
3122 ; CHECK-NEXT: vmv1r.v v10, v8
3123 ; CHECK-NEXT: vmv1r.v v11, v8
3124 ; CHECK-NEXT: vmv1r.v v12, v8
3125 ; CHECK-NEXT: vmv1r.v v13, v8
3126 ; CHECK-NEXT: vmv1r.v v14, v8
3127 ; CHECK-NEXT: vmv1r.v v15, v8
3128 ; CHECK-NEXT: vmv1r.v v16, v8
3129 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3130 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
3133 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
; vsuxseg7 declarations (unmasked/masked): seven <vscale x 1 x i32> values,
; base ptr, <vscale x 1 x i16> index vector, optional mask, i32 VL.
3137 declare void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, i32)
3138 declare void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
3140 define void @test_vsuxseg7_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
3141 ; CHECK-LABEL: test_vsuxseg7_nxv1i32_nxv1i16:
3142 ; CHECK: # %bb.0: # %entry
3143 ; CHECK-NEXT: vmv1r.v v10, v8
3144 ; CHECK-NEXT: vmv1r.v v11, v8
3145 ; CHECK-NEXT: vmv1r.v v12, v8
3146 ; CHECK-NEXT: vmv1r.v v13, v8
3147 ; CHECK-NEXT: vmv1r.v v14, v8
3148 ; CHECK-NEXT: vmv1r.v v15, v8
3149 ; CHECK-NEXT: vmv1r.v v16, v8
3150 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3151 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
3154 tail call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
3158 define void @test_vsuxseg7_mask_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3159 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i32_nxv1i16:
3160 ; CHECK: # %bb.0: # %entry
3161 ; CHECK-NEXT: vmv1r.v v10, v8
3162 ; CHECK-NEXT: vmv1r.v v11, v8
3163 ; CHECK-NEXT: vmv1r.v v12, v8
3164 ; CHECK-NEXT: vmv1r.v v13, v8
3165 ; CHECK-NEXT: vmv1r.v v14, v8
3166 ; CHECK-NEXT: vmv1r.v v15, v8
3167 ; CHECK-NEXT: vmv1r.v v16, v8
3168 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3169 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
3172 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
; vsuxseg8 declarations (unmasked/masked): eight <vscale x 1 x i32> values,
; base ptr, <vscale x 1 x i8> index vector, optional mask, i32 VL.
3176 declare void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, i32)
3177 declare void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i8(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
3179 define void @test_vsuxseg8_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
3180 ; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i8:
3181 ; CHECK: # %bb.0: # %entry
3182 ; CHECK-NEXT: vmv1r.v v10, v8
3183 ; CHECK-NEXT: vmv1r.v v11, v8
3184 ; CHECK-NEXT: vmv1r.v v12, v8
3185 ; CHECK-NEXT: vmv1r.v v13, v8
3186 ; CHECK-NEXT: vmv1r.v v14, v8
3187 ; CHECK-NEXT: vmv1r.v v15, v8
3188 ; CHECK-NEXT: vmv1r.v v16, v8
3189 ; CHECK-NEXT: vmv1r.v v17, v8
3190 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3191 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
3194 tail call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
3198 define void @test_vsuxseg8_mask_nxv1i32_nxv1i8(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3199 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i8:
3200 ; CHECK: # %bb.0: # %entry
3201 ; CHECK-NEXT: vmv1r.v v10, v8
3202 ; CHECK-NEXT: vmv1r.v v11, v8
3203 ; CHECK-NEXT: vmv1r.v v12, v8
3204 ; CHECK-NEXT: vmv1r.v v13, v8
3205 ; CHECK-NEXT: vmv1r.v v14, v8
3206 ; CHECK-NEXT: vmv1r.v v15, v8
3207 ; CHECK-NEXT: vmv1r.v v16, v8
3208 ; CHECK-NEXT: vmv1r.v v17, v8
3209 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3210 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
3213 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i8(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
; vsuxseg8 declarations (unmasked/masked): eight <vscale x 1 x i32> values,
; base ptr, <vscale x 1 x i32> index vector, optional mask, i32 VL.
3217 declare void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, i32)
3218 declare void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i32(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
3220 define void @test_vsuxseg8_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
3221 ; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i32:
3222 ; CHECK: # %bb.0: # %entry
3223 ; CHECK-NEXT: vmv1r.v v10, v8
3224 ; CHECK-NEXT: vmv1r.v v11, v8
3225 ; CHECK-NEXT: vmv1r.v v12, v8
3226 ; CHECK-NEXT: vmv1r.v v13, v8
3227 ; CHECK-NEXT: vmv1r.v v14, v8
3228 ; CHECK-NEXT: vmv1r.v v15, v8
3229 ; CHECK-NEXT: vmv1r.v v16, v8
3230 ; CHECK-NEXT: vmv1r.v v17, v8
3231 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3232 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
3235 tail call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
3239 define void @test_vsuxseg8_mask_nxv1i32_nxv1i32(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3240 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i32:
3241 ; CHECK: # %bb.0: # %entry
3242 ; CHECK-NEXT: vmv1r.v v10, v8
3243 ; CHECK-NEXT: vmv1r.v v11, v8
3244 ; CHECK-NEXT: vmv1r.v v12, v8
3245 ; CHECK-NEXT: vmv1r.v v13, v8
3246 ; CHECK-NEXT: vmv1r.v v14, v8
3247 ; CHECK-NEXT: vmv1r.v v15, v8
3248 ; CHECK-NEXT: vmv1r.v v16, v8
3249 ; CHECK-NEXT: vmv1r.v v17, v8
3250 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3251 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
3254 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i32(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
; vsuxseg8 declarations (unmasked/masked): eight <vscale x 1 x i32> values,
; base ptr, <vscale x 1 x i16> index vector, optional mask, i32 VL.
3258 declare void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, i32)
3259 declare void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i16(<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>,<vscale x 1 x i32>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
3261 define void @test_vsuxseg8_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
3262 ; CHECK-LABEL: test_vsuxseg8_nxv1i32_nxv1i16:
3263 ; CHECK: # %bb.0: # %entry
3264 ; CHECK-NEXT: vmv1r.v v10, v8
3265 ; CHECK-NEXT: vmv1r.v v11, v8
3266 ; CHECK-NEXT: vmv1r.v v12, v8
3267 ; CHECK-NEXT: vmv1r.v v13, v8
3268 ; CHECK-NEXT: vmv1r.v v14, v8
3269 ; CHECK-NEXT: vmv1r.v v15, v8
3270 ; CHECK-NEXT: vmv1r.v v16, v8
3271 ; CHECK-NEXT: vmv1r.v v17, v8
3272 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3273 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
3276 tail call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
3280 define void @test_vsuxseg8_mask_nxv1i32_nxv1i16(<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
3281 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i32_nxv1i16:
3282 ; CHECK: # %bb.0: # %entry
3283 ; CHECK-NEXT: vmv1r.v v10, v8
3284 ; CHECK-NEXT: vmv1r.v v11, v8
3285 ; CHECK-NEXT: vmv1r.v v12, v8
3286 ; CHECK-NEXT: vmv1r.v v13, v8
3287 ; CHECK-NEXT: vmv1r.v v14, v8
3288 ; CHECK-NEXT: vmv1r.v v15, v8
3289 ; CHECK-NEXT: vmv1r.v v16, v8
3290 ; CHECK-NEXT: vmv1r.v v17, v8
3291 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
3292 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
3295 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i16(<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val,<vscale x 1 x i32> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
; vsuxseg2 declarations (unmasked/masked): two <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i16> index vector, optional mask, i32 VL.
3299 declare void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i16>, i32)
3300 declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
3302 define void @test_vsuxseg2_nxv8i16_nxv8i16(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
3303 ; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i16:
3304 ; CHECK: # %bb.0: # %entry
3305 ; CHECK-NEXT: vmv2r.v v12, v10
3306 ; CHECK-NEXT: vmv2r.v v10, v8
3307 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3308 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
3311 tail call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
3315 define void @test_vsuxseg2_mask_nxv8i16_nxv8i16(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3316 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i16:
3317 ; CHECK: # %bb.0: # %entry
3318 ; CHECK-NEXT: vmv2r.v v12, v10
3319 ; CHECK-NEXT: vmv2r.v v10, v8
3320 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3321 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
3324 tail call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2 declarations (unmasked/masked): two <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i8> index vector, optional mask, i32 VL.
3328 declare void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i8(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i8>, i32)
3329 declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i8(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
3331 define void @test_vsuxseg2_nxv8i16_nxv8i8(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
3332 ; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i8:
3333 ; CHECK: # %bb.0: # %entry
3334 ; CHECK-NEXT: vmv1r.v v12, v10
3335 ; CHECK-NEXT: vmv2r.v v10, v8
3336 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3337 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12
3340 tail call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i8(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
3344 define void @test_vsuxseg2_mask_nxv8i16_nxv8i8(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3345 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i8:
3346 ; CHECK: # %bb.0: # %entry
3347 ; CHECK-NEXT: vmv1r.v v12, v10
3348 ; CHECK-NEXT: vmv2r.v v10, v8
3349 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3350 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12, v0.t
3353 tail call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i8(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2 declarations (unmasked/masked): two <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i32> index vector, optional mask, i32 VL.
3357 declare void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i32(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i32>, i32)
3358 declare void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i32(<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
3360 define void @test_vsuxseg2_nxv8i16_nxv8i32(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
3361 ; CHECK-LABEL: test_vsuxseg2_nxv8i16_nxv8i32:
3362 ; CHECK: # %bb.0: # %entry
3363 ; CHECK-NEXT: vmv2r.v v10, v8
3364 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3365 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
3368 tail call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i32(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
3372 define void @test_vsuxseg2_mask_nxv8i16_nxv8i32(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3373 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i16_nxv8i32:
3374 ; CHECK: # %bb.0: # %entry
3375 ; CHECK-NEXT: vmv2r.v v10, v8
3376 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3377 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
3380 tail call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i32(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg3 declarations (unmasked/masked): three <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i16> index vector, optional mask, i32 VL.
3384 declare void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i16>, i32)
3385 declare void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
3387 define void @test_vsuxseg3_nxv8i16_nxv8i16(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
3388 ; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i16:
3389 ; CHECK: # %bb.0: # %entry
3390 ; CHECK-NEXT: vmv2r.v v12, v8
3391 ; CHECK-NEXT: vmv2r.v v14, v8
3392 ; CHECK-NEXT: vmv2r.v v16, v8
3393 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3394 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
3397 tail call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
3401 define void @test_vsuxseg3_mask_nxv8i16_nxv8i16(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3402 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i16:
3403 ; CHECK: # %bb.0: # %entry
3404 ; CHECK-NEXT: vmv2r.v v12, v8
3405 ; CHECK-NEXT: vmv2r.v v14, v8
3406 ; CHECK-NEXT: vmv2r.v v16, v8
3407 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3408 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
3411 tail call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg3 declarations (unmasked/masked): three <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i8> index vector, optional mask, i32 VL.
3415 declare void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i8(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i8>, i32)
3416 declare void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i8(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
3418 define void @test_vsuxseg3_nxv8i16_nxv8i8(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
3419 ; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i8:
3420 ; CHECK: # %bb.0: # %entry
3421 ; CHECK-NEXT: vmv2r.v v12, v8
3422 ; CHECK-NEXT: vmv2r.v v14, v8
3423 ; CHECK-NEXT: vmv2r.v v16, v8
3424 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3425 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
3428 tail call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i8(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
3432 define void @test_vsuxseg3_mask_nxv8i16_nxv8i8(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3433 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i8:
3434 ; CHECK: # %bb.0: # %entry
3435 ; CHECK-NEXT: vmv2r.v v12, v8
3436 ; CHECK-NEXT: vmv2r.v v14, v8
3437 ; CHECK-NEXT: vmv2r.v v16, v8
3438 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3439 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
3442 tail call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i8(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg3 declarations (unmasked/masked): three <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i32> index vector, optional mask, i32 VL.
3446 declare void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i32(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i32>, i32)
3447 declare void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
3449 define void @test_vsuxseg3_nxv8i16_nxv8i32(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
3450 ; CHECK-LABEL: test_vsuxseg3_nxv8i16_nxv8i32:
3451 ; CHECK: # %bb.0: # %entry
3452 ; CHECK-NEXT: vmv2r.v v10, v8
3453 ; CHECK-NEXT: vmv4r.v v16, v12
3454 ; CHECK-NEXT: vmv2r.v v12, v8
3455 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3456 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16
3459 tail call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i32(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
3463 define void @test_vsuxseg3_mask_nxv8i16_nxv8i32(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3464 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8i16_nxv8i32:
3465 ; CHECK: # %bb.0: # %entry
3466 ; CHECK-NEXT: vmv2r.v v10, v8
3467 ; CHECK-NEXT: vmv4r.v v16, v12
3468 ; CHECK-NEXT: vmv2r.v v12, v8
3469 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3470 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t
3473 tail call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg4 declarations (unmasked/masked): four <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i16> index vector, optional mask, i32 VL.
3477 declare void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i16>, i32)
3478 declare void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i16(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
3480 define void @test_vsuxseg4_nxv8i16_nxv8i16(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
3481 ; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i16:
3482 ; CHECK: # %bb.0: # %entry
3483 ; CHECK-NEXT: vmv2r.v v12, v8
3484 ; CHECK-NEXT: vmv2r.v v14, v8
3485 ; CHECK-NEXT: vmv2r.v v16, v8
3486 ; CHECK-NEXT: vmv2r.v v18, v8
3487 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3488 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
3491 tail call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
3495 define void @test_vsuxseg4_mask_nxv8i16_nxv8i16(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3496 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i16:
3497 ; CHECK: # %bb.0: # %entry
3498 ; CHECK-NEXT: vmv2r.v v12, v8
3499 ; CHECK-NEXT: vmv2r.v v14, v8
3500 ; CHECK-NEXT: vmv2r.v v16, v8
3501 ; CHECK-NEXT: vmv2r.v v18, v8
3502 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3503 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
3506 tail call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i16(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg4 declarations (unmasked/masked): four <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i8> index vector, optional mask, i32 VL.
3510 declare void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i8(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i8>, i32)
3511 declare void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i8(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
3513 define void @test_vsuxseg4_nxv8i16_nxv8i8(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
3514 ; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i8:
3515 ; CHECK: # %bb.0: # %entry
3516 ; CHECK-NEXT: vmv2r.v v12, v8
3517 ; CHECK-NEXT: vmv2r.v v14, v8
3518 ; CHECK-NEXT: vmv2r.v v16, v8
3519 ; CHECK-NEXT: vmv2r.v v18, v8
3520 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3521 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
3524 tail call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i8(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
3528 define void @test_vsuxseg4_mask_nxv8i16_nxv8i8(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3529 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i8:
3530 ; CHECK: # %bb.0: # %entry
3531 ; CHECK-NEXT: vmv2r.v v12, v8
3532 ; CHECK-NEXT: vmv2r.v v14, v8
3533 ; CHECK-NEXT: vmv2r.v v16, v8
3534 ; CHECK-NEXT: vmv2r.v v18, v8
3535 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3536 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
3539 tail call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i8(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg4 declarations (unmasked/masked): four <vscale x 8 x i16> values,
; base ptr, <vscale x 8 x i32> index vector, optional mask, i32 VL.
3543 declare void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i32(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i32>, i32)
3544 declare void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i32(<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>,<vscale x 8 x i16>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
3546 define void @test_vsuxseg4_nxv8i16_nxv8i32(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
3547 ; CHECK-LABEL: test_vsuxseg4_nxv8i16_nxv8i32:
3548 ; CHECK: # %bb.0: # %entry
3549 ; CHECK-NEXT: vmv2r.v v16, v8
3550 ; CHECK-NEXT: vmv2r.v v18, v8
3551 ; CHECK-NEXT: vmv2r.v v20, v8
3552 ; CHECK-NEXT: vmv2r.v v22, v8
3553 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3554 ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12
3557 tail call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i32(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
3561 define void @test_vsuxseg4_mask_nxv8i16_nxv8i32(<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3562 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8i16_nxv8i32:
3563 ; CHECK: # %bb.0: # %entry
3564 ; CHECK-NEXT: vmv2r.v v16, v8
3565 ; CHECK-NEXT: vmv2r.v v18, v8
3566 ; CHECK-NEXT: vmv2r.v v20, v8
3567 ; CHECK-NEXT: vmv2r.v v22, v8
3568 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
3569 ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12, v0.t
3572 tail call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i32(<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val,<vscale x 8 x i16> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2 declarations (unmasked/masked): two <vscale x 8 x i8> values,
; base ptr, <vscale x 8 x i16> index vector, optional mask, i32 VL.
3576 declare void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, i32)
3577 declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
3579 define void @test_vsuxseg2_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
3580 ; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i16:
3581 ; CHECK: # %bb.0: # %entry
3582 ; CHECK-NEXT: vmv1r.v v9, v8
3583 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
3584 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
3587 tail call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
3591 define void @test_vsuxseg2_mask_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3592 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i16:
3593 ; CHECK: # %bb.0: # %entry
3594 ; CHECK-NEXT: vmv1r.v v9, v8
3595 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
3596 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
3599 tail call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2 declarations (unmasked/masked): two <vscale x 8 x i8> values,
; base ptr, <vscale x 8 x i8> index vector, optional mask, i32 VL.
3603 declare void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, i32)
3604 declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
3606 define void @test_vsuxseg2_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
3607 ; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i8:
3608 ; CHECK: # %bb.0: # %entry
3609 ; CHECK-NEXT: vmv1r.v v10, v9
3610 ; CHECK-NEXT: vmv1r.v v9, v8
3611 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
3612 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
3615 tail call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
3619 define void @test_vsuxseg2_mask_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3620 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i8:
3621 ; CHECK: # %bb.0: # %entry
3622 ; CHECK-NEXT: vmv1r.v v10, v9
3623 ; CHECK-NEXT: vmv1r.v v9, v8
3624 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
3625 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
3628 tail call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2 declarations (unmasked/masked): two <vscale x 8 x i8> values,
; base ptr, <vscale x 8 x i32> index vector, optional mask, i32 VL.
3632 declare void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, i32)
3633 declare void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
3635 define void @test_vsuxseg2_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
3636 ; CHECK-LABEL: test_vsuxseg2_nxv8i8_nxv8i32:
3637 ; CHECK: # %bb.0: # %entry
3638 ; CHECK-NEXT: vmv1r.v v9, v8
3639 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
3640 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
3643 tail call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
3647 define void @test_vsuxseg2_mask_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
3648 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8i8_nxv8i32:
3649 ; CHECK: # %bb.0: # %entry
3650 ; CHECK-NEXT: vmv1r.v v9, v8
3651 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
3652 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
3655 tail call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg3 declarations (unmasked/masked): three <vscale x 8 x i8> values,
; base ptr, <vscale x 8 x i16> index vector, optional mask, i32 VL.
3659 declare void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, i32)
3660 declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
3662 define void @test_vsuxseg3_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
3663 ; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i16:
3664 ; CHECK: # %bb.0: # %entry
3665 ; CHECK-NEXT: vmv1r.v v9, v8
3666 ; CHECK-NEXT: vmv2r.v v12, v10
3667 ; CHECK-NEXT: vmv1r.v v10, v8
3668 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
3669 ; CHECK-NEXT: vsuxseg3ei16.v v8, (a0), v12
3672 tail call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
define void @test_vsuxseg3_mask_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv2r.v v12, v10
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg3ei16.v v8, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, i32)
declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg3_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, i32)
declare void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg3_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg3ei32.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg3ei32.v v8, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg4_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg4ei16.v v12, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg4ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg4_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg4_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg4ei32.v v8, (a0), v12
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg4ei32.v v8, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg5_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg5ei16.v v12, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg5ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg5_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg5_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg5ei32.v v8, (a0), v16
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v9, v8
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv4r.v v16, v12
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg5ei32.v v8, (a0), v16, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg6_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg6ei16.v v12, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg6_mask_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg6ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg6_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg6_mask_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg6ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg6_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vmv1r.v v19, v8
; CHECK-NEXT:    vmv1r.v v20, v8
; CHECK-NEXT:    vmv1r.v v21, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg6ei32.v v16, (a0), v12
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg6_mask_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vmv1r.v v19, v8
; CHECK-NEXT:    vmv1r.v v20, v8
; CHECK-NEXT:    vmv1r.v v21, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg6ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, i32)
declare void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg7_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg7ei16.v v12, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg7_mask_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg7ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, i32)
declare void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg7_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg7_mask_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, i32)
declare void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg7_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vmv1r.v v19, v8
; CHECK-NEXT:    vmv1r.v v20, v8
; CHECK-NEXT:    vmv1r.v v21, v8
; CHECK-NEXT:    vmv1r.v v22, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg7ei32.v v16, (a0), v12
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg7_mask_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vmv1r.v v19, v8
; CHECK-NEXT:    vmv1r.v v20, v8
; CHECK-NEXT:    vmv1r.v v21, v8
; CHECK-NEXT:    vmv1r.v v22, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg7ei32.v v16, (a0), v12, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, i32)
declare void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i16(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg8_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vmv1r.v v19, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg8ei16.v v12, (a0), v10
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg8_mask_nxv8i8_nxv8i16(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vmv1r.v v19, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg8ei16.v v12, (a0), v10, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i16(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, i32)
declare void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg8_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg8_mask_nxv8i8_nxv8i8(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v10, v8
; CHECK-NEXT:    vmv1r.v v11, v8
; CHECK-NEXT:    vmv1r.v v12, v8
; CHECK-NEXT:    vmv1r.v v13, v8
; CHECK-NEXT:    vmv1r.v v14, v8
; CHECK-NEXT:    vmv1r.v v15, v8
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i8(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
  ret void
}
declare void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, i32)
declare void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i32(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)

define void @test_vsuxseg8_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv8i8_nxv8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vmv1r.v v16, v8
; CHECK-NEXT:    vmv1r.v v17, v8
; CHECK-NEXT:    vmv1r.v v18, v8
; CHECK-NEXT:    vmv1r.v v19, v8
; CHECK-NEXT:    vmv1r.v v20, v8
; CHECK-NEXT:    vmv1r.v v21, v8
; CHECK-NEXT:    vmv1r.v v22, v8
; CHECK-NEXT:    vmv1r.v v23, v8
; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
; CHECK-NEXT:    vsuxseg8ei32.v v16, (a0), v12
; CHECK-NEXT:    ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
  ret void
}
4284 define void @test_vsuxseg8_mask_nxv8i8_nxv8i32(<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
4285 ; CHECK-LABEL: test_vsuxseg8_mask_nxv8i8_nxv8i32:
4286 ; CHECK: # %bb.0: # %entry
4287 ; CHECK-NEXT: vmv1r.v v16, v8
4288 ; CHECK-NEXT: vmv1r.v v17, v8
4289 ; CHECK-NEXT: vmv1r.v v18, v8
4290 ; CHECK-NEXT: vmv1r.v v19, v8
4291 ; CHECK-NEXT: vmv1r.v v20, v8
4292 ; CHECK-NEXT: vmv1r.v v21, v8
4293 ; CHECK-NEXT: vmv1r.v v22, v8
4294 ; CHECK-NEXT: vmv1r.v v23, v8
4295 ; CHECK-NEXT: vsetvli zero, a1, e8, m1, ta, ma
4296 ; CHECK-NEXT: vsuxseg8ei32.v v16, (a0), v12, v0.t
4299 tail call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i32(<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val,<vscale x 8 x i8> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2, e32 data (m4) with e16 indices (m2). Expected codegen moves the
; index group to v16 (vmv2r) and duplicates the value group into v12 (vmv4r)
; so v8/v12 hold the two segment fields for vsuxseg2ei16.v.
declare void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i16(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr, <vscale x 8 x i16>, i32)
declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i16(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
define void @test_vsuxseg2_nxv8i32_nxv8i16(<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16
tail call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i16(<vscale x 8 x i32> %val,<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
define void @test_vsuxseg2_mask_nxv8i32_nxv8i16(<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16, v0.t
tail call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i16(<vscale x 8 x i32> %val,<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2, e32 data (m4) with e8 indices (m1). Expected codegen moves the
; index register to v16 (vmv1r) and duplicates the value group into v12
; (vmv4r) before vsuxseg2ei8.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i8(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr, <vscale x 8 x i8>, i32)
declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i8(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
define void @test_vsuxseg2_nxv8i32_nxv8i8(<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16
tail call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i8(<vscale x 8 x i32> %val,<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
define void @test_vsuxseg2_mask_nxv8i32_nxv8i8(<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16, v0.t
tail call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i8(<vscale x 8 x i32> %val,<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2, e32 data with e32 indices (both m4). Expected codegen moves the
; index group to v16 and duplicates the value group into v12 (both vmv4r)
; before vsuxseg2ei32.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i32(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr, <vscale x 8 x i32>, i32)
declare void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i32(<vscale x 8 x i32>,<vscale x 8 x i32>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
define void @test_vsuxseg2_nxv8i32_nxv8i32(<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
tail call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i32(<vscale x 8 x i32> %val,<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
define void @test_vsuxseg2_mask_nxv8i32_nxv8i32(<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv8i32_nxv8i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv4r.v v16, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
tail call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i32(<vscale x 8 x i32> %val,<vscale x 8 x i32> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
; vsuxseg2, e8/mf2 data with e16 indices. Expected codegen shifts the index
; from v9 to v10 and copies %val into v9 so v8/v9 are the two segment fields
; for vsuxseg2ei16.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, i32)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg2_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
tail call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
define void @test_vsuxseg2_mask_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
tail call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg2, e8/mf2 data with e8 indices. Same register shuffle as the e16
; variant above (index v9->v10, value duplicated into v9), but the store is
; vsuxseg2ei8.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, i32)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg2_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
tail call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
define void @test_vsuxseg2_mask_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
tail call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg2, e8/mf2 data with e32 indices (index already in v10, an m2 group),
; so only one copy is expected: %val duplicated into v9 before
; vsuxseg2ei32.v v8, (a0), v10; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, i32)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg2_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
tail call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
define void @test_vsuxseg2_mask_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
tail call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg3, e8/mf2 data with e16 indices. All three segment fields are %val,
; replicated into v10-v12 for vsuxseg3ei16.v with the index staying in v9;
; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, i32)
declare void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg3_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
define void @test_vsuxseg3_mask_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg3, e8/mf2 data with e8 indices. %val is replicated into v10-v12 and
; the index stays in v9 for vsuxseg3ei8.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, i32)
declare void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg3_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
define void @test_vsuxseg3_mask_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg3, e8/mf2 data with e32 indices (m2 group). Expected codegen copies
; %val into v9 and v10 (after first saving the index v10 group to v12 with
; vmv2r) and issues vsuxseg3ei32.v v8, (a0), v12; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, i32)
declare void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg3_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12
tail call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
define void @test_vsuxseg3_mask_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vmv2r.v v12, v10
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg4, e8/mf2 data with e16 indices. %val is replicated into v10-v13 and
; the index stays in v9 for vsuxseg4ei16.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg4_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
define void @test_vsuxseg4_mask_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg4, e8/mf2 data with e8 indices. %val is replicated into v10-v13 and
; the index stays in v9 for vsuxseg4ei8.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg4_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
define void @test_vsuxseg4_mask_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg4, e8/mf2 data with e32 indices (m2 group in v10). %val is
; replicated into v12-v15 for vsuxseg4ei32.v v12, (a0), v10; masked form
; appends v0.t.
declare void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, i32)
declare void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg4_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
tail call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
define void @test_vsuxseg4_mask_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
tail call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg5, e8/mf2 data with e16 indices. %val is replicated into v10-v14 and
; the index stays in v9 for vsuxseg5ei16.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg5_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
define void @test_vsuxseg5_mask_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg5, e8/mf2 data with e8 indices. %val is replicated into v10-v14 and
; the index stays in v9 for vsuxseg5ei8.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg5_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
define void @test_vsuxseg5_mask_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg5, e8/mf2 data with e32 indices (m2 group in v10). %val is
; replicated into v12-v16 for vsuxseg5ei32.v v12, (a0), v10; masked form
; appends v0.t.
declare void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, i32)
declare void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg5_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10
tail call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
define void @test_vsuxseg5_mask_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t
tail call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg6, e8/mf2 data with e16 indices. %val is replicated into v10-v15 and
; the index stays in v9 for vsuxseg6ei16.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg6_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
define void @test_vsuxseg6_mask_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg6, e8/mf2 data with e8 indices. %val is replicated into v10-v15 and
; the index stays in v9 for vsuxseg6ei8.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg6_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
define void @test_vsuxseg6_mask_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg6, e8/mf2 data with e32 indices (m2 group in v10). %val is
; replicated into v12-v17 for vsuxseg6ei32.v v12, (a0), v10; masked form
; appends v0.t.
declare void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, i32)
declare void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg6_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10
tail call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
define void @test_vsuxseg6_mask_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t
tail call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
; vsuxseg7, e8/mf2 data with e16 indices. %val is replicated into v10-v16 and
; the index stays in v9 for vsuxseg7ei16.v; masked form appends v0.t.
declare void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, i32)
declare void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg7_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
tail call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
define void @test_vsuxseg7_mask_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
tail call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
4922 declare void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, i32)
4923 declare void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg7_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg7_mask_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
  ret void
}
4961 declare void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, i32)
4962 declare void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg7_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vmv1r.v v18, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg7_mask_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vmv1r.v v18, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
  ret void
}
5000 declare void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, i32)
5001 declare void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i16(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg8_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg8_mask_nxv4i8_nxv4i16(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i16(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
  ret void
}
5041 declare void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, i32)
5042 declare void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i8(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg8_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg8_mask_nxv4i8_nxv4i8(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i8(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
  ret void
}
5082 declare void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, i32)
5083 declare void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i32(<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>,<vscale x 4 x i8>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
define void @test_vsuxseg8_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vmv1r.v v18, v8
; CHECK-NEXT: vmv1r.v v19, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg8_mask_nxv4i8_nxv4i32(<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv4i8_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vmv1r.v v16, v8
; CHECK-NEXT: vmv1r.v v17, v8
; CHECK-NEXT: vmv1r.v v18, v8
; CHECK-NEXT: vmv1r.v v19, v8
; CHECK-NEXT: vsetvli zero, a1, e8, mf2, ta, ma
; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i32(<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val,<vscale x 4 x i8> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
  ret void
}
5123 declare void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, i32)
5124 declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg2_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5152 declare void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, i32)
5153 declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg2_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5181 declare void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, i32)
5182 declare void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg2_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg2_mask_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5210 declare void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, i32)
5211 declare void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg3_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5241 declare void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, i32)
5242 declare void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg3_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5272 declare void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, i32)
5273 declare void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg3_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg3_mask_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5303 declare void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, i32)
5304 declare void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg4_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5336 declare void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, i32)
5337 declare void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg4_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5369 declare void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, i32)
5370 declare void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg4_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg4_mask_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5402 declare void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, i32)
5403 declare void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg5_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5437 declare void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, i32)
5438 declare void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg5_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5472 declare void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, i32)
5473 declare void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg5_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
  ret void
}
define void @test_vsuxseg5_mask_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i16_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
  ret void
}
5507 declare void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, i32)
5508 declare void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
define void @test_vsuxseg6_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v10, v8
; CHECK-NEXT: vmv1r.v v11, v8
; CHECK-NEXT: vmv1r.v v12, v8
; CHECK-NEXT: vmv1r.v v13, v8
; CHECK-NEXT: vmv1r.v v14, v8
; CHECK-NEXT: vmv1r.v v15, v8
; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
; CHECK-NEXT: ret
entry:
  tail call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
  ret void
}
5527 define void @test_vsuxseg6_mask_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5528 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i8:
5529 ; CHECK: # %bb.0: # %entry
5530 ; CHECK-NEXT: vmv1r.v v10, v8
5531 ; CHECK-NEXT: vmv1r.v v11, v8
5532 ; CHECK-NEXT: vmv1r.v v12, v8
5533 ; CHECK-NEXT: vmv1r.v v13, v8
5534 ; CHECK-NEXT: vmv1r.v v14, v8
5535 ; CHECK-NEXT: vmv1r.v v15, v8
5536 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5537 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
5540 tail call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
5544 declare void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, i32)
5545 declare void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
5547 define void @test_vsuxseg6_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
5548 ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i32:
5549 ; CHECK: # %bb.0: # %entry
5550 ; CHECK-NEXT: vmv1r.v v10, v8
5551 ; CHECK-NEXT: vmv1r.v v11, v8
5552 ; CHECK-NEXT: vmv1r.v v12, v8
5553 ; CHECK-NEXT: vmv1r.v v13, v8
5554 ; CHECK-NEXT: vmv1r.v v14, v8
5555 ; CHECK-NEXT: vmv1r.v v15, v8
5556 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5557 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
5560 tail call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
5564 define void @test_vsuxseg6_mask_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5565 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i32:
5566 ; CHECK: # %bb.0: # %entry
5567 ; CHECK-NEXT: vmv1r.v v10, v8
5568 ; CHECK-NEXT: vmv1r.v v11, v8
5569 ; CHECK-NEXT: vmv1r.v v12, v8
5570 ; CHECK-NEXT: vmv1r.v v13, v8
5571 ; CHECK-NEXT: vmv1r.v v14, v8
5572 ; CHECK-NEXT: vmv1r.v v15, v8
5573 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5574 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
5577 tail call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
5581 declare void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, i32)
5582 declare void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
5584 define void @test_vsuxseg6_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
5585 ; CHECK-LABEL: test_vsuxseg6_nxv1i16_nxv1i16:
5586 ; CHECK: # %bb.0: # %entry
5587 ; CHECK-NEXT: vmv1r.v v10, v8
5588 ; CHECK-NEXT: vmv1r.v v11, v8
5589 ; CHECK-NEXT: vmv1r.v v12, v8
5590 ; CHECK-NEXT: vmv1r.v v13, v8
5591 ; CHECK-NEXT: vmv1r.v v14, v8
5592 ; CHECK-NEXT: vmv1r.v v15, v8
5593 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5594 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
5597 tail call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
5601 define void @test_vsuxseg6_mask_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5602 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1i16_nxv1i16:
5603 ; CHECK: # %bb.0: # %entry
5604 ; CHECK-NEXT: vmv1r.v v10, v8
5605 ; CHECK-NEXT: vmv1r.v v11, v8
5606 ; CHECK-NEXT: vmv1r.v v12, v8
5607 ; CHECK-NEXT: vmv1r.v v13, v8
5608 ; CHECK-NEXT: vmv1r.v v14, v8
5609 ; CHECK-NEXT: vmv1r.v v15, v8
5610 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5611 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
5614 tail call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
5618 declare void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, i32)
5619 declare void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
5621 define void @test_vsuxseg7_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
5622 ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i8:
5623 ; CHECK: # %bb.0: # %entry
5624 ; CHECK-NEXT: vmv1r.v v10, v8
5625 ; CHECK-NEXT: vmv1r.v v11, v8
5626 ; CHECK-NEXT: vmv1r.v v12, v8
5627 ; CHECK-NEXT: vmv1r.v v13, v8
5628 ; CHECK-NEXT: vmv1r.v v14, v8
5629 ; CHECK-NEXT: vmv1r.v v15, v8
5630 ; CHECK-NEXT: vmv1r.v v16, v8
5631 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5632 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
5635 tail call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
5639 define void @test_vsuxseg7_mask_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5640 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i16_nxv1i8:
5641 ; CHECK: # %bb.0: # %entry
5642 ; CHECK-NEXT: vmv1r.v v10, v8
5643 ; CHECK-NEXT: vmv1r.v v11, v8
5644 ; CHECK-NEXT: vmv1r.v v12, v8
5645 ; CHECK-NEXT: vmv1r.v v13, v8
5646 ; CHECK-NEXT: vmv1r.v v14, v8
5647 ; CHECK-NEXT: vmv1r.v v15, v8
5648 ; CHECK-NEXT: vmv1r.v v16, v8
5649 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5650 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
5653 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
5657 declare void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, i32)
5658 declare void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
5660 define void @test_vsuxseg7_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
5661 ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i32:
5662 ; CHECK: # %bb.0: # %entry
5663 ; CHECK-NEXT: vmv1r.v v10, v8
5664 ; CHECK-NEXT: vmv1r.v v11, v8
5665 ; CHECK-NEXT: vmv1r.v v12, v8
5666 ; CHECK-NEXT: vmv1r.v v13, v8
5667 ; CHECK-NEXT: vmv1r.v v14, v8
5668 ; CHECK-NEXT: vmv1r.v v15, v8
5669 ; CHECK-NEXT: vmv1r.v v16, v8
5670 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5671 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
5674 tail call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
5678 define void @test_vsuxseg7_mask_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5679 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i16_nxv1i32:
5680 ; CHECK: # %bb.0: # %entry
5681 ; CHECK-NEXT: vmv1r.v v10, v8
5682 ; CHECK-NEXT: vmv1r.v v11, v8
5683 ; CHECK-NEXT: vmv1r.v v12, v8
5684 ; CHECK-NEXT: vmv1r.v v13, v8
5685 ; CHECK-NEXT: vmv1r.v v14, v8
5686 ; CHECK-NEXT: vmv1r.v v15, v8
5687 ; CHECK-NEXT: vmv1r.v v16, v8
5688 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5689 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
5692 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
5696 declare void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, i32)
5697 declare void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
5699 define void @test_vsuxseg7_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
5700 ; CHECK-LABEL: test_vsuxseg7_nxv1i16_nxv1i16:
5701 ; CHECK: # %bb.0: # %entry
5702 ; CHECK-NEXT: vmv1r.v v10, v8
5703 ; CHECK-NEXT: vmv1r.v v11, v8
5704 ; CHECK-NEXT: vmv1r.v v12, v8
5705 ; CHECK-NEXT: vmv1r.v v13, v8
5706 ; CHECK-NEXT: vmv1r.v v14, v8
5707 ; CHECK-NEXT: vmv1r.v v15, v8
5708 ; CHECK-NEXT: vmv1r.v v16, v8
5709 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5710 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
5713 tail call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
5717 define void @test_vsuxseg7_mask_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5718 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1i16_nxv1i16:
5719 ; CHECK: # %bb.0: # %entry
5720 ; CHECK-NEXT: vmv1r.v v10, v8
5721 ; CHECK-NEXT: vmv1r.v v11, v8
5722 ; CHECK-NEXT: vmv1r.v v12, v8
5723 ; CHECK-NEXT: vmv1r.v v13, v8
5724 ; CHECK-NEXT: vmv1r.v v14, v8
5725 ; CHECK-NEXT: vmv1r.v v15, v8
5726 ; CHECK-NEXT: vmv1r.v v16, v8
5727 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5728 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
5731 tail call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
5735 declare void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, i32)
5736 declare void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i8(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
5738 define void @test_vsuxseg8_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
5739 ; CHECK-LABEL: test_vsuxseg8_nxv1i16_nxv1i8:
5740 ; CHECK: # %bb.0: # %entry
5741 ; CHECK-NEXT: vmv1r.v v10, v8
5742 ; CHECK-NEXT: vmv1r.v v11, v8
5743 ; CHECK-NEXT: vmv1r.v v12, v8
5744 ; CHECK-NEXT: vmv1r.v v13, v8
5745 ; CHECK-NEXT: vmv1r.v v14, v8
5746 ; CHECK-NEXT: vmv1r.v v15, v8
5747 ; CHECK-NEXT: vmv1r.v v16, v8
5748 ; CHECK-NEXT: vmv1r.v v17, v8
5749 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5750 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
5753 tail call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
5757 define void @test_vsuxseg8_mask_nxv1i16_nxv1i8(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5758 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i16_nxv1i8:
5759 ; CHECK: # %bb.0: # %entry
5760 ; CHECK-NEXT: vmv1r.v v10, v8
5761 ; CHECK-NEXT: vmv1r.v v11, v8
5762 ; CHECK-NEXT: vmv1r.v v12, v8
5763 ; CHECK-NEXT: vmv1r.v v13, v8
5764 ; CHECK-NEXT: vmv1r.v v14, v8
5765 ; CHECK-NEXT: vmv1r.v v15, v8
5766 ; CHECK-NEXT: vmv1r.v v16, v8
5767 ; CHECK-NEXT: vmv1r.v v17, v8
5768 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5769 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
5772 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i8(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
5776 declare void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, i32)
5777 declare void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i32(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
5779 define void @test_vsuxseg8_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
5780 ; CHECK-LABEL: test_vsuxseg8_nxv1i16_nxv1i32:
5781 ; CHECK: # %bb.0: # %entry
5782 ; CHECK-NEXT: vmv1r.v v10, v8
5783 ; CHECK-NEXT: vmv1r.v v11, v8
5784 ; CHECK-NEXT: vmv1r.v v12, v8
5785 ; CHECK-NEXT: vmv1r.v v13, v8
5786 ; CHECK-NEXT: vmv1r.v v14, v8
5787 ; CHECK-NEXT: vmv1r.v v15, v8
5788 ; CHECK-NEXT: vmv1r.v v16, v8
5789 ; CHECK-NEXT: vmv1r.v v17, v8
5790 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5791 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
5794 tail call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
5798 define void @test_vsuxseg8_mask_nxv1i16_nxv1i32(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5799 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i16_nxv1i32:
5800 ; CHECK: # %bb.0: # %entry
5801 ; CHECK-NEXT: vmv1r.v v10, v8
5802 ; CHECK-NEXT: vmv1r.v v11, v8
5803 ; CHECK-NEXT: vmv1r.v v12, v8
5804 ; CHECK-NEXT: vmv1r.v v13, v8
5805 ; CHECK-NEXT: vmv1r.v v14, v8
5806 ; CHECK-NEXT: vmv1r.v v15, v8
5807 ; CHECK-NEXT: vmv1r.v v16, v8
5808 ; CHECK-NEXT: vmv1r.v v17, v8
5809 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5810 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
5813 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i32(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
5817 declare void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, i32)
5818 declare void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i16(<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>,<vscale x 1 x i16>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
5820 define void @test_vsuxseg8_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
5821 ; CHECK-LABEL: test_vsuxseg8_nxv1i16_nxv1i16:
5822 ; CHECK: # %bb.0: # %entry
5823 ; CHECK-NEXT: vmv1r.v v10, v8
5824 ; CHECK-NEXT: vmv1r.v v11, v8
5825 ; CHECK-NEXT: vmv1r.v v12, v8
5826 ; CHECK-NEXT: vmv1r.v v13, v8
5827 ; CHECK-NEXT: vmv1r.v v14, v8
5828 ; CHECK-NEXT: vmv1r.v v15, v8
5829 ; CHECK-NEXT: vmv1r.v v16, v8
5830 ; CHECK-NEXT: vmv1r.v v17, v8
5831 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5832 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
5835 tail call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
5839 define void @test_vsuxseg8_mask_nxv1i16_nxv1i16(<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
5840 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1i16_nxv1i16:
5841 ; CHECK: # %bb.0: # %entry
5842 ; CHECK-NEXT: vmv1r.v v10, v8
5843 ; CHECK-NEXT: vmv1r.v v11, v8
5844 ; CHECK-NEXT: vmv1r.v v12, v8
5845 ; CHECK-NEXT: vmv1r.v v13, v8
5846 ; CHECK-NEXT: vmv1r.v v14, v8
5847 ; CHECK-NEXT: vmv1r.v v15, v8
5848 ; CHECK-NEXT: vmv1r.v v16, v8
5849 ; CHECK-NEXT: vmv1r.v v17, v8
5850 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
5851 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
5854 tail call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i16(<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val,<vscale x 1 x i16> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
5858 declare void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i16(<vscale x 32 x i8>,<vscale x 32 x i8>, ptr, <vscale x 32 x i16>, i32)
5859 declare void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i16(<vscale x 32 x i8>,<vscale x 32 x i8>, ptr, <vscale x 32 x i16>, <vscale x 32 x i1>, i32)
5861 define void @test_vsuxseg2_nxv32i8_nxv32i16(<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i16> %index, i32 %vl) {
5862 ; CHECK-LABEL: test_vsuxseg2_nxv32i8_nxv32i16:
5863 ; CHECK: # %bb.0: # %entry
5864 ; CHECK-NEXT: vmv4r.v v12, v8
5865 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, ma
5866 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16
5869 tail call void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i16(<vscale x 32 x i8> %val,<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i16> %index, i32 %vl)
5873 define void @test_vsuxseg2_mask_nxv32i8_nxv32i16(<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i16> %index, <vscale x 32 x i1> %mask, i32 %vl) {
5874 ; CHECK-LABEL: test_vsuxseg2_mask_nxv32i8_nxv32i16:
5875 ; CHECK: # %bb.0: # %entry
5876 ; CHECK-NEXT: vmv4r.v v12, v8
5877 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, ma
5878 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16, v0.t
5881 tail call void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i16(<vscale x 32 x i8> %val,<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i16> %index, <vscale x 32 x i1> %mask, i32 %vl)
5885 declare void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i8(<vscale x 32 x i8>,<vscale x 32 x i8>, ptr, <vscale x 32 x i8>, i32)
5886 declare void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i8(<vscale x 32 x i8>,<vscale x 32 x i8>, ptr, <vscale x 32 x i8>, <vscale x 32 x i1>, i32)
5888 define void @test_vsuxseg2_nxv32i8_nxv32i8(<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i8> %index, i32 %vl) {
5889 ; CHECK-LABEL: test_vsuxseg2_nxv32i8_nxv32i8:
5890 ; CHECK: # %bb.0: # %entry
5891 ; CHECK-NEXT: vmv4r.v v16, v12
5892 ; CHECK-NEXT: vmv4r.v v12, v8
5893 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, ma
5894 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16
5897 tail call void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i8(<vscale x 32 x i8> %val,<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i8> %index, i32 %vl)
5901 define void @test_vsuxseg2_mask_nxv32i8_nxv32i8(<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i8> %index, <vscale x 32 x i1> %mask, i32 %vl) {
5902 ; CHECK-LABEL: test_vsuxseg2_mask_nxv32i8_nxv32i8:
5903 ; CHECK: # %bb.0: # %entry
5904 ; CHECK-NEXT: vmv4r.v v16, v12
5905 ; CHECK-NEXT: vmv4r.v v12, v8
5906 ; CHECK-NEXT: vsetvli zero, a1, e8, m4, ta, ma
5907 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16, v0.t
5910 tail call void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i8(<vscale x 32 x i8> %val,<vscale x 32 x i8> %val, ptr %base, <vscale x 32 x i8> %index, <vscale x 32 x i1> %mask, i32 %vl)
5914 declare void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, i32)
5915 declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
5917 define void @test_vsuxseg2_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
5918 ; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i32:
5919 ; CHECK: # %bb.0: # %entry
5920 ; CHECK-NEXT: vmv1r.v v10, v9
5921 ; CHECK-NEXT: vmv1r.v v9, v8
5922 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
5923 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
5926 tail call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
5930 define void @test_vsuxseg2_mask_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
5931 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i32:
5932 ; CHECK: # %bb.0: # %entry
5933 ; CHECK-NEXT: vmv1r.v v10, v9
5934 ; CHECK-NEXT: vmv1r.v v9, v8
5935 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
5936 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
5939 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
5943 declare void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, i32)
5944 declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
5946 define void @test_vsuxseg2_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
5947 ; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i8:
5948 ; CHECK: # %bb.0: # %entry
5949 ; CHECK-NEXT: vmv1r.v v10, v9
5950 ; CHECK-NEXT: vmv1r.v v9, v8
5951 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
5952 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
5955 tail call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
5959 define void @test_vsuxseg2_mask_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
5960 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i8:
5961 ; CHECK: # %bb.0: # %entry
5962 ; CHECK-NEXT: vmv1r.v v10, v9
5963 ; CHECK-NEXT: vmv1r.v v9, v8
5964 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
5965 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
5968 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
5972 declare void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, i32)
5973 declare void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
5975 define void @test_vsuxseg2_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
5976 ; CHECK-LABEL: test_vsuxseg2_nxv2i8_nxv2i16:
5977 ; CHECK: # %bb.0: # %entry
5978 ; CHECK-NEXT: vmv1r.v v10, v9
5979 ; CHECK-NEXT: vmv1r.v v9, v8
5980 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
5981 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
5984 tail call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
5988 define void @test_vsuxseg2_mask_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
5989 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i8_nxv2i16:
5990 ; CHECK: # %bb.0: # %entry
5991 ; CHECK-NEXT: vmv1r.v v10, v9
5992 ; CHECK-NEXT: vmv1r.v v9, v8
5993 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
5994 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
5997 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6001 declare void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, i32)
6002 declare void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6004 define void @test_vsuxseg3_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6005 ; CHECK-LABEL: test_vsuxseg3_nxv2i8_nxv2i32:
6006 ; CHECK: # %bb.0: # %entry
6007 ; CHECK-NEXT: vmv1r.v v10, v8
6008 ; CHECK-NEXT: vmv1r.v v11, v8
6009 ; CHECK-NEXT: vmv1r.v v12, v8
6010 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6011 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
6014 tail call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6018 define void @test_vsuxseg3_mask_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6019 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i32:
6020 ; CHECK: # %bb.0: # %entry
6021 ; CHECK-NEXT: vmv1r.v v10, v8
6022 ; CHECK-NEXT: vmv1r.v v11, v8
6023 ; CHECK-NEXT: vmv1r.v v12, v8
6024 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6025 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
6028 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6032 declare void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, i32)
6033 declare void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6035 define void @test_vsuxseg3_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6036 ; CHECK-LABEL: test_vsuxseg3_nxv2i8_nxv2i8:
6037 ; CHECK: # %bb.0: # %entry
6038 ; CHECK-NEXT: vmv1r.v v10, v8
6039 ; CHECK-NEXT: vmv1r.v v11, v8
6040 ; CHECK-NEXT: vmv1r.v v12, v8
6041 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6042 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
6045 tail call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6049 define void @test_vsuxseg3_mask_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6050 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i8:
6051 ; CHECK: # %bb.0: # %entry
6052 ; CHECK-NEXT: vmv1r.v v10, v8
6053 ; CHECK-NEXT: vmv1r.v v11, v8
6054 ; CHECK-NEXT: vmv1r.v v12, v8
6055 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6056 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
6059 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6063 declare void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, i32)
6064 declare void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6066 define void @test_vsuxseg3_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6067 ; CHECK-LABEL: test_vsuxseg3_nxv2i8_nxv2i16:
6068 ; CHECK: # %bb.0: # %entry
6069 ; CHECK-NEXT: vmv1r.v v10, v8
6070 ; CHECK-NEXT: vmv1r.v v11, v8
6071 ; CHECK-NEXT: vmv1r.v v12, v8
6072 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6073 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
6076 tail call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6080 define void @test_vsuxseg3_mask_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6081 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i8_nxv2i16:
6082 ; CHECK: # %bb.0: # %entry
6083 ; CHECK-NEXT: vmv1r.v v10, v8
6084 ; CHECK-NEXT: vmv1r.v v11, v8
6085 ; CHECK-NEXT: vmv1r.v v12, v8
6086 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6087 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
6090 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6094 declare void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, i32)
6095 declare void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6097 define void @test_vsuxseg4_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6098 ; CHECK-LABEL: test_vsuxseg4_nxv2i8_nxv2i32:
6099 ; CHECK: # %bb.0: # %entry
6100 ; CHECK-NEXT: vmv1r.v v10, v8
6101 ; CHECK-NEXT: vmv1r.v v11, v8
6102 ; CHECK-NEXT: vmv1r.v v12, v8
6103 ; CHECK-NEXT: vmv1r.v v13, v8
6104 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6105 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
6108 tail call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6112 define void @test_vsuxseg4_mask_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6113 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i32:
6114 ; CHECK: # %bb.0: # %entry
6115 ; CHECK-NEXT: vmv1r.v v10, v8
6116 ; CHECK-NEXT: vmv1r.v v11, v8
6117 ; CHECK-NEXT: vmv1r.v v12, v8
6118 ; CHECK-NEXT: vmv1r.v v13, v8
6119 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6120 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
6123 tail call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6127 declare void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, i32)
6128 declare void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6130 define void @test_vsuxseg4_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6131 ; CHECK-LABEL: test_vsuxseg4_nxv2i8_nxv2i8:
6132 ; CHECK: # %bb.0: # %entry
6133 ; CHECK-NEXT: vmv1r.v v10, v8
6134 ; CHECK-NEXT: vmv1r.v v11, v8
6135 ; CHECK-NEXT: vmv1r.v v12, v8
6136 ; CHECK-NEXT: vmv1r.v v13, v8
6137 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6138 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
6141 tail call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6145 define void @test_vsuxseg4_mask_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6146 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i8:
6147 ; CHECK: # %bb.0: # %entry
6148 ; CHECK-NEXT: vmv1r.v v10, v8
6149 ; CHECK-NEXT: vmv1r.v v11, v8
6150 ; CHECK-NEXT: vmv1r.v v12, v8
6151 ; CHECK-NEXT: vmv1r.v v13, v8
6152 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6153 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
6156 tail call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6160 declare void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, i32)
6161 declare void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6163 define void @test_vsuxseg4_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6164 ; CHECK-LABEL: test_vsuxseg4_nxv2i8_nxv2i16:
6165 ; CHECK: # %bb.0: # %entry
6166 ; CHECK-NEXT: vmv1r.v v10, v8
6167 ; CHECK-NEXT: vmv1r.v v11, v8
6168 ; CHECK-NEXT: vmv1r.v v12, v8
6169 ; CHECK-NEXT: vmv1r.v v13, v8
6170 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6171 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
6174 tail call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6178 define void @test_vsuxseg4_mask_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6179 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i8_nxv2i16:
6180 ; CHECK: # %bb.0: # %entry
6181 ; CHECK-NEXT: vmv1r.v v10, v8
6182 ; CHECK-NEXT: vmv1r.v v11, v8
6183 ; CHECK-NEXT: vmv1r.v v12, v8
6184 ; CHECK-NEXT: vmv1r.v v13, v8
6185 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6186 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
6189 tail call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6193 declare void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, i32)
6194 declare void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6196 define void @test_vsuxseg5_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6197 ; CHECK-LABEL: test_vsuxseg5_nxv2i8_nxv2i32:
6198 ; CHECK: # %bb.0: # %entry
6199 ; CHECK-NEXT: vmv1r.v v10, v8
6200 ; CHECK-NEXT: vmv1r.v v11, v8
6201 ; CHECK-NEXT: vmv1r.v v12, v8
6202 ; CHECK-NEXT: vmv1r.v v13, v8
6203 ; CHECK-NEXT: vmv1r.v v14, v8
6204 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6205 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
6208 tail call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6212 define void @test_vsuxseg5_mask_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6213 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i32:
6214 ; CHECK: # %bb.0: # %entry
6215 ; CHECK-NEXT: vmv1r.v v10, v8
6216 ; CHECK-NEXT: vmv1r.v v11, v8
6217 ; CHECK-NEXT: vmv1r.v v12, v8
6218 ; CHECK-NEXT: vmv1r.v v13, v8
6219 ; CHECK-NEXT: vmv1r.v v14, v8
6220 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6221 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
6224 tail call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6228 declare void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, i32)
6229 declare void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6231 define void @test_vsuxseg5_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6232 ; CHECK-LABEL: test_vsuxseg5_nxv2i8_nxv2i8:
6233 ; CHECK: # %bb.0: # %entry
6234 ; CHECK-NEXT: vmv1r.v v10, v8
6235 ; CHECK-NEXT: vmv1r.v v11, v8
6236 ; CHECK-NEXT: vmv1r.v v12, v8
6237 ; CHECK-NEXT: vmv1r.v v13, v8
6238 ; CHECK-NEXT: vmv1r.v v14, v8
6239 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6240 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
6243 tail call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6247 define void @test_vsuxseg5_mask_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6248 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i8:
6249 ; CHECK: # %bb.0: # %entry
6250 ; CHECK-NEXT: vmv1r.v v10, v8
6251 ; CHECK-NEXT: vmv1r.v v11, v8
6252 ; CHECK-NEXT: vmv1r.v v12, v8
6253 ; CHECK-NEXT: vmv1r.v v13, v8
6254 ; CHECK-NEXT: vmv1r.v v14, v8
6255 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6256 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
6259 tail call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6263 declare void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, i32)
6264 declare void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6266 define void @test_vsuxseg5_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6267 ; CHECK-LABEL: test_vsuxseg5_nxv2i8_nxv2i16:
6268 ; CHECK: # %bb.0: # %entry
6269 ; CHECK-NEXT: vmv1r.v v10, v8
6270 ; CHECK-NEXT: vmv1r.v v11, v8
6271 ; CHECK-NEXT: vmv1r.v v12, v8
6272 ; CHECK-NEXT: vmv1r.v v13, v8
6273 ; CHECK-NEXT: vmv1r.v v14, v8
6274 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6275 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
6278 tail call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6282 define void @test_vsuxseg5_mask_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6283 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i8_nxv2i16:
6284 ; CHECK: # %bb.0: # %entry
6285 ; CHECK-NEXT: vmv1r.v v10, v8
6286 ; CHECK-NEXT: vmv1r.v v11, v8
6287 ; CHECK-NEXT: vmv1r.v v12, v8
6288 ; CHECK-NEXT: vmv1r.v v13, v8
6289 ; CHECK-NEXT: vmv1r.v v14, v8
6290 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6291 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
6294 tail call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6298 declare void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, i32)
6299 declare void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6301 define void @test_vsuxseg6_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6302 ; CHECK-LABEL: test_vsuxseg6_nxv2i8_nxv2i32:
6303 ; CHECK: # %bb.0: # %entry
6304 ; CHECK-NEXT: vmv1r.v v10, v8
6305 ; CHECK-NEXT: vmv1r.v v11, v8
6306 ; CHECK-NEXT: vmv1r.v v12, v8
6307 ; CHECK-NEXT: vmv1r.v v13, v8
6308 ; CHECK-NEXT: vmv1r.v v14, v8
6309 ; CHECK-NEXT: vmv1r.v v15, v8
6310 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6311 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
6314 tail call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6318 define void @test_vsuxseg6_mask_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6319 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i32:
6320 ; CHECK: # %bb.0: # %entry
6321 ; CHECK-NEXT: vmv1r.v v10, v8
6322 ; CHECK-NEXT: vmv1r.v v11, v8
6323 ; CHECK-NEXT: vmv1r.v v12, v8
6324 ; CHECK-NEXT: vmv1r.v v13, v8
6325 ; CHECK-NEXT: vmv1r.v v14, v8
6326 ; CHECK-NEXT: vmv1r.v v15, v8
6327 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6328 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
6331 tail call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6335 declare void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, i32)
6336 declare void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6338 define void @test_vsuxseg6_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6339 ; CHECK-LABEL: test_vsuxseg6_nxv2i8_nxv2i8:
6340 ; CHECK: # %bb.0: # %entry
6341 ; CHECK-NEXT: vmv1r.v v10, v8
6342 ; CHECK-NEXT: vmv1r.v v11, v8
6343 ; CHECK-NEXT: vmv1r.v v12, v8
6344 ; CHECK-NEXT: vmv1r.v v13, v8
6345 ; CHECK-NEXT: vmv1r.v v14, v8
6346 ; CHECK-NEXT: vmv1r.v v15, v8
6347 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6348 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
6351 tail call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6355 define void @test_vsuxseg6_mask_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6356 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i8:
6357 ; CHECK: # %bb.0: # %entry
6358 ; CHECK-NEXT: vmv1r.v v10, v8
6359 ; CHECK-NEXT: vmv1r.v v11, v8
6360 ; CHECK-NEXT: vmv1r.v v12, v8
6361 ; CHECK-NEXT: vmv1r.v v13, v8
6362 ; CHECK-NEXT: vmv1r.v v14, v8
6363 ; CHECK-NEXT: vmv1r.v v15, v8
6364 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6365 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
6368 tail call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6372 declare void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, i32)
6373 declare void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6375 define void @test_vsuxseg6_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6376 ; CHECK-LABEL: test_vsuxseg6_nxv2i8_nxv2i16:
6377 ; CHECK: # %bb.0: # %entry
6378 ; CHECK-NEXT: vmv1r.v v10, v8
6379 ; CHECK-NEXT: vmv1r.v v11, v8
6380 ; CHECK-NEXT: vmv1r.v v12, v8
6381 ; CHECK-NEXT: vmv1r.v v13, v8
6382 ; CHECK-NEXT: vmv1r.v v14, v8
6383 ; CHECK-NEXT: vmv1r.v v15, v8
6384 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6385 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
6388 tail call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6392 define void @test_vsuxseg6_mask_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6393 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i8_nxv2i16:
6394 ; CHECK: # %bb.0: # %entry
6395 ; CHECK-NEXT: vmv1r.v v10, v8
6396 ; CHECK-NEXT: vmv1r.v v11, v8
6397 ; CHECK-NEXT: vmv1r.v v12, v8
6398 ; CHECK-NEXT: vmv1r.v v13, v8
6399 ; CHECK-NEXT: vmv1r.v v14, v8
6400 ; CHECK-NEXT: vmv1r.v v15, v8
6401 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6402 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
6405 tail call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6409 declare void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, i32)
6410 declare void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6412 define void @test_vsuxseg7_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6413 ; CHECK-LABEL: test_vsuxseg7_nxv2i8_nxv2i32:
6414 ; CHECK: # %bb.0: # %entry
6415 ; CHECK-NEXT: vmv1r.v v10, v8
6416 ; CHECK-NEXT: vmv1r.v v11, v8
6417 ; CHECK-NEXT: vmv1r.v v12, v8
6418 ; CHECK-NEXT: vmv1r.v v13, v8
6419 ; CHECK-NEXT: vmv1r.v v14, v8
6420 ; CHECK-NEXT: vmv1r.v v15, v8
6421 ; CHECK-NEXT: vmv1r.v v16, v8
6422 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6423 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
6426 tail call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6430 define void @test_vsuxseg7_mask_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6431 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i32:
6432 ; CHECK: # %bb.0: # %entry
6433 ; CHECK-NEXT: vmv1r.v v10, v8
6434 ; CHECK-NEXT: vmv1r.v v11, v8
6435 ; CHECK-NEXT: vmv1r.v v12, v8
6436 ; CHECK-NEXT: vmv1r.v v13, v8
6437 ; CHECK-NEXT: vmv1r.v v14, v8
6438 ; CHECK-NEXT: vmv1r.v v15, v8
6439 ; CHECK-NEXT: vmv1r.v v16, v8
6440 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6441 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
6444 tail call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6448 declare void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, i32)
6449 declare void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6451 define void @test_vsuxseg7_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6452 ; CHECK-LABEL: test_vsuxseg7_nxv2i8_nxv2i8:
6453 ; CHECK: # %bb.0: # %entry
6454 ; CHECK-NEXT: vmv1r.v v10, v8
6455 ; CHECK-NEXT: vmv1r.v v11, v8
6456 ; CHECK-NEXT: vmv1r.v v12, v8
6457 ; CHECK-NEXT: vmv1r.v v13, v8
6458 ; CHECK-NEXT: vmv1r.v v14, v8
6459 ; CHECK-NEXT: vmv1r.v v15, v8
6460 ; CHECK-NEXT: vmv1r.v v16, v8
6461 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6462 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
6465 tail call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6469 define void @test_vsuxseg7_mask_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6470 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i8:
6471 ; CHECK: # %bb.0: # %entry
6472 ; CHECK-NEXT: vmv1r.v v10, v8
6473 ; CHECK-NEXT: vmv1r.v v11, v8
6474 ; CHECK-NEXT: vmv1r.v v12, v8
6475 ; CHECK-NEXT: vmv1r.v v13, v8
6476 ; CHECK-NEXT: vmv1r.v v14, v8
6477 ; CHECK-NEXT: vmv1r.v v15, v8
6478 ; CHECK-NEXT: vmv1r.v v16, v8
6479 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6480 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
6483 tail call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6487 declare void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, i32)
6488 declare void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6490 define void @test_vsuxseg7_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6491 ; CHECK-LABEL: test_vsuxseg7_nxv2i8_nxv2i16:
6492 ; CHECK: # %bb.0: # %entry
6493 ; CHECK-NEXT: vmv1r.v v10, v8
6494 ; CHECK-NEXT: vmv1r.v v11, v8
6495 ; CHECK-NEXT: vmv1r.v v12, v8
6496 ; CHECK-NEXT: vmv1r.v v13, v8
6497 ; CHECK-NEXT: vmv1r.v v14, v8
6498 ; CHECK-NEXT: vmv1r.v v15, v8
6499 ; CHECK-NEXT: vmv1r.v v16, v8
6500 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6501 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
6504 tail call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6508 define void @test_vsuxseg7_mask_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6509 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i8_nxv2i16:
6510 ; CHECK: # %bb.0: # %entry
6511 ; CHECK-NEXT: vmv1r.v v10, v8
6512 ; CHECK-NEXT: vmv1r.v v11, v8
6513 ; CHECK-NEXT: vmv1r.v v12, v8
6514 ; CHECK-NEXT: vmv1r.v v13, v8
6515 ; CHECK-NEXT: vmv1r.v v14, v8
6516 ; CHECK-NEXT: vmv1r.v v15, v8
6517 ; CHECK-NEXT: vmv1r.v v16, v8
6518 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6519 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
6522 tail call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6526 declare void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, i32)
6527 declare void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i32(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6529 define void @test_vsuxseg8_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6530 ; CHECK-LABEL: test_vsuxseg8_nxv2i8_nxv2i32:
6531 ; CHECK: # %bb.0: # %entry
6532 ; CHECK-NEXT: vmv1r.v v10, v8
6533 ; CHECK-NEXT: vmv1r.v v11, v8
6534 ; CHECK-NEXT: vmv1r.v v12, v8
6535 ; CHECK-NEXT: vmv1r.v v13, v8
6536 ; CHECK-NEXT: vmv1r.v v14, v8
6537 ; CHECK-NEXT: vmv1r.v v15, v8
6538 ; CHECK-NEXT: vmv1r.v v16, v8
6539 ; CHECK-NEXT: vmv1r.v v17, v8
6540 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6541 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
6544 tail call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6548 define void @test_vsuxseg8_mask_nxv2i8_nxv2i32(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6549 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i32:
6550 ; CHECK: # %bb.0: # %entry
6551 ; CHECK-NEXT: vmv1r.v v10, v8
6552 ; CHECK-NEXT: vmv1r.v v11, v8
6553 ; CHECK-NEXT: vmv1r.v v12, v8
6554 ; CHECK-NEXT: vmv1r.v v13, v8
6555 ; CHECK-NEXT: vmv1r.v v14, v8
6556 ; CHECK-NEXT: vmv1r.v v15, v8
6557 ; CHECK-NEXT: vmv1r.v v16, v8
6558 ; CHECK-NEXT: vmv1r.v v17, v8
6559 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6560 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
6563 tail call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i32(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6567 declare void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, i32)
6568 declare void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i8(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6570 define void @test_vsuxseg8_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6571 ; CHECK-LABEL: test_vsuxseg8_nxv2i8_nxv2i8:
6572 ; CHECK: # %bb.0: # %entry
6573 ; CHECK-NEXT: vmv1r.v v10, v8
6574 ; CHECK-NEXT: vmv1r.v v11, v8
6575 ; CHECK-NEXT: vmv1r.v v12, v8
6576 ; CHECK-NEXT: vmv1r.v v13, v8
6577 ; CHECK-NEXT: vmv1r.v v14, v8
6578 ; CHECK-NEXT: vmv1r.v v15, v8
6579 ; CHECK-NEXT: vmv1r.v v16, v8
6580 ; CHECK-NEXT: vmv1r.v v17, v8
6581 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6582 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
6585 tail call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6589 define void @test_vsuxseg8_mask_nxv2i8_nxv2i8(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6590 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i8:
6591 ; CHECK: # %bb.0: # %entry
6592 ; CHECK-NEXT: vmv1r.v v10, v8
6593 ; CHECK-NEXT: vmv1r.v v11, v8
6594 ; CHECK-NEXT: vmv1r.v v12, v8
6595 ; CHECK-NEXT: vmv1r.v v13, v8
6596 ; CHECK-NEXT: vmv1r.v v14, v8
6597 ; CHECK-NEXT: vmv1r.v v15, v8
6598 ; CHECK-NEXT: vmv1r.v v16, v8
6599 ; CHECK-NEXT: vmv1r.v v17, v8
6600 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6601 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
6604 tail call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i8(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6608 declare void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, i32)
6609 declare void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i16(<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>,<vscale x 2 x i8>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6611 define void @test_vsuxseg8_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6612 ; CHECK-LABEL: test_vsuxseg8_nxv2i8_nxv2i16:
6613 ; CHECK: # %bb.0: # %entry
6614 ; CHECK-NEXT: vmv1r.v v10, v8
6615 ; CHECK-NEXT: vmv1r.v v11, v8
6616 ; CHECK-NEXT: vmv1r.v v12, v8
6617 ; CHECK-NEXT: vmv1r.v v13, v8
6618 ; CHECK-NEXT: vmv1r.v v14, v8
6619 ; CHECK-NEXT: vmv1r.v v15, v8
6620 ; CHECK-NEXT: vmv1r.v v16, v8
6621 ; CHECK-NEXT: vmv1r.v v17, v8
6622 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6623 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
6626 tail call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6630 define void @test_vsuxseg8_mask_nxv2i8_nxv2i16(<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6631 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i8_nxv2i16:
6632 ; CHECK: # %bb.0: # %entry
6633 ; CHECK-NEXT: vmv1r.v v10, v8
6634 ; CHECK-NEXT: vmv1r.v v11, v8
6635 ; CHECK-NEXT: vmv1r.v v12, v8
6636 ; CHECK-NEXT: vmv1r.v v13, v8
6637 ; CHECK-NEXT: vmv1r.v v14, v8
6638 ; CHECK-NEXT: vmv1r.v v15, v8
6639 ; CHECK-NEXT: vmv1r.v v16, v8
6640 ; CHECK-NEXT: vmv1r.v v17, v8
6641 ; CHECK-NEXT: vsetvli zero, a1, e8, mf4, ta, ma
6642 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
6645 tail call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i16(<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val,<vscale x 2 x i8> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6649 declare void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, i32)
6650 declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6652 define void @test_vsuxseg2_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6653 ; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i32:
6654 ; CHECK: # %bb.0: # %entry
6655 ; CHECK-NEXT: vmv1r.v v10, v9
6656 ; CHECK-NEXT: vmv1r.v v9, v8
6657 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6658 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
6661 tail call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6665 define void @test_vsuxseg2_mask_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6666 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i32:
6667 ; CHECK: # %bb.0: # %entry
6668 ; CHECK-NEXT: vmv1r.v v10, v9
6669 ; CHECK-NEXT: vmv1r.v v9, v8
6670 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6671 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
6674 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6678 declare void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, i32)
6679 declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6681 define void @test_vsuxseg2_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6682 ; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i8:
6683 ; CHECK: # %bb.0: # %entry
6684 ; CHECK-NEXT: vmv1r.v v10, v9
6685 ; CHECK-NEXT: vmv1r.v v9, v8
6686 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6687 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
6690 tail call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6694 define void @test_vsuxseg2_mask_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6695 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i8:
6696 ; CHECK: # %bb.0: # %entry
6697 ; CHECK-NEXT: vmv1r.v v10, v9
6698 ; CHECK-NEXT: vmv1r.v v9, v8
6699 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6700 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
6703 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6707 declare void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, i32)
6708 declare void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6710 define void @test_vsuxseg2_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6711 ; CHECK-LABEL: test_vsuxseg2_nxv2i16_nxv2i16:
6712 ; CHECK: # %bb.0: # %entry
6713 ; CHECK-NEXT: vmv1r.v v10, v9
6714 ; CHECK-NEXT: vmv1r.v v9, v8
6715 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6716 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
6719 tail call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6723 define void @test_vsuxseg2_mask_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6724 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2i16_nxv2i16:
6725 ; CHECK: # %bb.0: # %entry
6726 ; CHECK-NEXT: vmv1r.v v10, v9
6727 ; CHECK-NEXT: vmv1r.v v9, v8
6728 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6729 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
6732 tail call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6736 declare void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, i32)
6737 declare void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6739 define void @test_vsuxseg3_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6740 ; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i32:
6741 ; CHECK: # %bb.0: # %entry
6742 ; CHECK-NEXT: vmv1r.v v10, v8
6743 ; CHECK-NEXT: vmv1r.v v11, v8
6744 ; CHECK-NEXT: vmv1r.v v12, v8
6745 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6746 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
6749 tail call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6753 define void @test_vsuxseg3_mask_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6754 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i32:
6755 ; CHECK: # %bb.0: # %entry
6756 ; CHECK-NEXT: vmv1r.v v10, v8
6757 ; CHECK-NEXT: vmv1r.v v11, v8
6758 ; CHECK-NEXT: vmv1r.v v12, v8
6759 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6760 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
6763 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6767 declare void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, i32)
6768 declare void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6770 define void @test_vsuxseg3_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6771 ; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i8:
6772 ; CHECK: # %bb.0: # %entry
6773 ; CHECK-NEXT: vmv1r.v v10, v8
6774 ; CHECK-NEXT: vmv1r.v v11, v8
6775 ; CHECK-NEXT: vmv1r.v v12, v8
6776 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6777 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
6780 tail call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6784 define void @test_vsuxseg3_mask_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6785 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i8:
6786 ; CHECK: # %bb.0: # %entry
6787 ; CHECK-NEXT: vmv1r.v v10, v8
6788 ; CHECK-NEXT: vmv1r.v v11, v8
6789 ; CHECK-NEXT: vmv1r.v v12, v8
6790 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6791 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
6794 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6798 declare void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, i32)
6799 declare void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6801 define void @test_vsuxseg3_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6802 ; CHECK-LABEL: test_vsuxseg3_nxv2i16_nxv2i16:
6803 ; CHECK: # %bb.0: # %entry
6804 ; CHECK-NEXT: vmv1r.v v10, v8
6805 ; CHECK-NEXT: vmv1r.v v11, v8
6806 ; CHECK-NEXT: vmv1r.v v12, v8
6807 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6808 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
6811 tail call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6815 define void @test_vsuxseg3_mask_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6816 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2i16_nxv2i16:
6817 ; CHECK: # %bb.0: # %entry
6818 ; CHECK-NEXT: vmv1r.v v10, v8
6819 ; CHECK-NEXT: vmv1r.v v11, v8
6820 ; CHECK-NEXT: vmv1r.v v12, v8
6821 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6822 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
6825 tail call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6829 declare void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, i32)
6830 declare void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6832 define void @test_vsuxseg4_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6833 ; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i32:
6834 ; CHECK: # %bb.0: # %entry
6835 ; CHECK-NEXT: vmv1r.v v10, v8
6836 ; CHECK-NEXT: vmv1r.v v11, v8
6837 ; CHECK-NEXT: vmv1r.v v12, v8
6838 ; CHECK-NEXT: vmv1r.v v13, v8
6839 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6840 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
6843 tail call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6847 define void @test_vsuxseg4_mask_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6848 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i32:
6849 ; CHECK: # %bb.0: # %entry
6850 ; CHECK-NEXT: vmv1r.v v10, v8
6851 ; CHECK-NEXT: vmv1r.v v11, v8
6852 ; CHECK-NEXT: vmv1r.v v12, v8
6853 ; CHECK-NEXT: vmv1r.v v13, v8
6854 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6855 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
6858 tail call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6862 declare void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, i32)
6863 declare void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6865 define void @test_vsuxseg4_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6866 ; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i8:
6867 ; CHECK: # %bb.0: # %entry
6868 ; CHECK-NEXT: vmv1r.v v10, v8
6869 ; CHECK-NEXT: vmv1r.v v11, v8
6870 ; CHECK-NEXT: vmv1r.v v12, v8
6871 ; CHECK-NEXT: vmv1r.v v13, v8
6872 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6873 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
6876 tail call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6880 define void @test_vsuxseg4_mask_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6881 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i8:
6882 ; CHECK: # %bb.0: # %entry
6883 ; CHECK-NEXT: vmv1r.v v10, v8
6884 ; CHECK-NEXT: vmv1r.v v11, v8
6885 ; CHECK-NEXT: vmv1r.v v12, v8
6886 ; CHECK-NEXT: vmv1r.v v13, v8
6887 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6888 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
6891 tail call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6895 declare void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, i32)
6896 declare void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
6898 define void @test_vsuxseg4_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
6899 ; CHECK-LABEL: test_vsuxseg4_nxv2i16_nxv2i16:
6900 ; CHECK: # %bb.0: # %entry
6901 ; CHECK-NEXT: vmv1r.v v10, v8
6902 ; CHECK-NEXT: vmv1r.v v11, v8
6903 ; CHECK-NEXT: vmv1r.v v12, v8
6904 ; CHECK-NEXT: vmv1r.v v13, v8
6905 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6906 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
6909 tail call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
6913 define void @test_vsuxseg4_mask_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6914 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2i16_nxv2i16:
6915 ; CHECK: # %bb.0: # %entry
6916 ; CHECK-NEXT: vmv1r.v v10, v8
6917 ; CHECK-NEXT: vmv1r.v v11, v8
6918 ; CHECK-NEXT: vmv1r.v v12, v8
6919 ; CHECK-NEXT: vmv1r.v v13, v8
6920 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6921 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
6924 tail call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
6928 declare void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, i32)
6929 declare void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
6931 define void @test_vsuxseg5_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
6932 ; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i32:
6933 ; CHECK: # %bb.0: # %entry
6934 ; CHECK-NEXT: vmv1r.v v10, v8
6935 ; CHECK-NEXT: vmv1r.v v11, v8
6936 ; CHECK-NEXT: vmv1r.v v12, v8
6937 ; CHECK-NEXT: vmv1r.v v13, v8
6938 ; CHECK-NEXT: vmv1r.v v14, v8
6939 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6940 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
6943 tail call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
6947 define void @test_vsuxseg5_mask_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6948 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i32:
6949 ; CHECK: # %bb.0: # %entry
6950 ; CHECK-NEXT: vmv1r.v v10, v8
6951 ; CHECK-NEXT: vmv1r.v v11, v8
6952 ; CHECK-NEXT: vmv1r.v v12, v8
6953 ; CHECK-NEXT: vmv1r.v v13, v8
6954 ; CHECK-NEXT: vmv1r.v v14, v8
6955 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6956 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
6959 tail call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
6963 declare void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, i32)
6964 declare void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
6966 define void @test_vsuxseg5_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
6967 ; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i8:
6968 ; CHECK: # %bb.0: # %entry
6969 ; CHECK-NEXT: vmv1r.v v10, v8
6970 ; CHECK-NEXT: vmv1r.v v11, v8
6971 ; CHECK-NEXT: vmv1r.v v12, v8
6972 ; CHECK-NEXT: vmv1r.v v13, v8
6973 ; CHECK-NEXT: vmv1r.v v14, v8
6974 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6975 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
6978 tail call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
6982 define void @test_vsuxseg5_mask_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
6983 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i8:
6984 ; CHECK: # %bb.0: # %entry
6985 ; CHECK-NEXT: vmv1r.v v10, v8
6986 ; CHECK-NEXT: vmv1r.v v11, v8
6987 ; CHECK-NEXT: vmv1r.v v12, v8
6988 ; CHECK-NEXT: vmv1r.v v13, v8
6989 ; CHECK-NEXT: vmv1r.v v14, v8
6990 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
6991 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
6994 tail call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
6998 declare void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, i32)
6999 declare void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
7001 define void @test_vsuxseg5_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
7002 ; CHECK-LABEL: test_vsuxseg5_nxv2i16_nxv2i16:
7003 ; CHECK: # %bb.0: # %entry
7004 ; CHECK-NEXT: vmv1r.v v10, v8
7005 ; CHECK-NEXT: vmv1r.v v11, v8
7006 ; CHECK-NEXT: vmv1r.v v12, v8
7007 ; CHECK-NEXT: vmv1r.v v13, v8
7008 ; CHECK-NEXT: vmv1r.v v14, v8
7009 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7010 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
7013 tail call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
7017 define void @test_vsuxseg5_mask_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7018 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2i16_nxv2i16:
7019 ; CHECK: # %bb.0: # %entry
7020 ; CHECK-NEXT: vmv1r.v v10, v8
7021 ; CHECK-NEXT: vmv1r.v v11, v8
7022 ; CHECK-NEXT: vmv1r.v v12, v8
7023 ; CHECK-NEXT: vmv1r.v v13, v8
7024 ; CHECK-NEXT: vmv1r.v v14, v8
7025 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7026 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
7029 tail call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
7033 declare void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, i32)
7034 declare void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
7036 define void @test_vsuxseg6_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
7037 ; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i32:
7038 ; CHECK: # %bb.0: # %entry
7039 ; CHECK-NEXT: vmv1r.v v10, v8
7040 ; CHECK-NEXT: vmv1r.v v11, v8
7041 ; CHECK-NEXT: vmv1r.v v12, v8
7042 ; CHECK-NEXT: vmv1r.v v13, v8
7043 ; CHECK-NEXT: vmv1r.v v14, v8
7044 ; CHECK-NEXT: vmv1r.v v15, v8
7045 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7046 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
7049 tail call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
7053 define void @test_vsuxseg6_mask_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7054 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i32:
7055 ; CHECK: # %bb.0: # %entry
7056 ; CHECK-NEXT: vmv1r.v v10, v8
7057 ; CHECK-NEXT: vmv1r.v v11, v8
7058 ; CHECK-NEXT: vmv1r.v v12, v8
7059 ; CHECK-NEXT: vmv1r.v v13, v8
7060 ; CHECK-NEXT: vmv1r.v v14, v8
7061 ; CHECK-NEXT: vmv1r.v v15, v8
7062 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7063 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
7066 tail call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
7070 declare void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, i32)
7071 declare void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
7073 define void @test_vsuxseg6_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
7074 ; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i8:
7075 ; CHECK: # %bb.0: # %entry
7076 ; CHECK-NEXT: vmv1r.v v10, v8
7077 ; CHECK-NEXT: vmv1r.v v11, v8
7078 ; CHECK-NEXT: vmv1r.v v12, v8
7079 ; CHECK-NEXT: vmv1r.v v13, v8
7080 ; CHECK-NEXT: vmv1r.v v14, v8
7081 ; CHECK-NEXT: vmv1r.v v15, v8
7082 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7083 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
7086 tail call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
7090 define void @test_vsuxseg6_mask_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7091 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i8:
7092 ; CHECK: # %bb.0: # %entry
7093 ; CHECK-NEXT: vmv1r.v v10, v8
7094 ; CHECK-NEXT: vmv1r.v v11, v8
7095 ; CHECK-NEXT: vmv1r.v v12, v8
7096 ; CHECK-NEXT: vmv1r.v v13, v8
7097 ; CHECK-NEXT: vmv1r.v v14, v8
7098 ; CHECK-NEXT: vmv1r.v v15, v8
7099 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7100 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
7103 tail call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
7107 declare void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, i32)
7108 declare void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
7110 define void @test_vsuxseg6_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
7111 ; CHECK-LABEL: test_vsuxseg6_nxv2i16_nxv2i16:
7112 ; CHECK: # %bb.0: # %entry
7113 ; CHECK-NEXT: vmv1r.v v10, v8
7114 ; CHECK-NEXT: vmv1r.v v11, v8
7115 ; CHECK-NEXT: vmv1r.v v12, v8
7116 ; CHECK-NEXT: vmv1r.v v13, v8
7117 ; CHECK-NEXT: vmv1r.v v14, v8
7118 ; CHECK-NEXT: vmv1r.v v15, v8
7119 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7120 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
7123 tail call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
7127 define void @test_vsuxseg6_mask_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7128 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2i16_nxv2i16:
7129 ; CHECK: # %bb.0: # %entry
7130 ; CHECK-NEXT: vmv1r.v v10, v8
7131 ; CHECK-NEXT: vmv1r.v v11, v8
7132 ; CHECK-NEXT: vmv1r.v v12, v8
7133 ; CHECK-NEXT: vmv1r.v v13, v8
7134 ; CHECK-NEXT: vmv1r.v v14, v8
7135 ; CHECK-NEXT: vmv1r.v v15, v8
7136 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7137 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
7140 tail call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
7144 declare void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, i32)
7145 declare void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
7147 define void @test_vsuxseg7_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
7148 ; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i32:
7149 ; CHECK: # %bb.0: # %entry
7150 ; CHECK-NEXT: vmv1r.v v10, v8
7151 ; CHECK-NEXT: vmv1r.v v11, v8
7152 ; CHECK-NEXT: vmv1r.v v12, v8
7153 ; CHECK-NEXT: vmv1r.v v13, v8
7154 ; CHECK-NEXT: vmv1r.v v14, v8
7155 ; CHECK-NEXT: vmv1r.v v15, v8
7156 ; CHECK-NEXT: vmv1r.v v16, v8
7157 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7158 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
7161 tail call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
7165 define void @test_vsuxseg7_mask_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7166 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i32:
7167 ; CHECK: # %bb.0: # %entry
7168 ; CHECK-NEXT: vmv1r.v v10, v8
7169 ; CHECK-NEXT: vmv1r.v v11, v8
7170 ; CHECK-NEXT: vmv1r.v v12, v8
7171 ; CHECK-NEXT: vmv1r.v v13, v8
7172 ; CHECK-NEXT: vmv1r.v v14, v8
7173 ; CHECK-NEXT: vmv1r.v v15, v8
7174 ; CHECK-NEXT: vmv1r.v v16, v8
7175 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7176 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
7179 tail call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
7183 declare void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, i32)
7184 declare void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
7186 define void @test_vsuxseg7_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
7187 ; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i8:
7188 ; CHECK: # %bb.0: # %entry
7189 ; CHECK-NEXT: vmv1r.v v10, v8
7190 ; CHECK-NEXT: vmv1r.v v11, v8
7191 ; CHECK-NEXT: vmv1r.v v12, v8
7192 ; CHECK-NEXT: vmv1r.v v13, v8
7193 ; CHECK-NEXT: vmv1r.v v14, v8
7194 ; CHECK-NEXT: vmv1r.v v15, v8
7195 ; CHECK-NEXT: vmv1r.v v16, v8
7196 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7197 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
7200 tail call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
7204 define void @test_vsuxseg7_mask_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7205 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i8:
7206 ; CHECK: # %bb.0: # %entry
7207 ; CHECK-NEXT: vmv1r.v v10, v8
7208 ; CHECK-NEXT: vmv1r.v v11, v8
7209 ; CHECK-NEXT: vmv1r.v v12, v8
7210 ; CHECK-NEXT: vmv1r.v v13, v8
7211 ; CHECK-NEXT: vmv1r.v v14, v8
7212 ; CHECK-NEXT: vmv1r.v v15, v8
7213 ; CHECK-NEXT: vmv1r.v v16, v8
7214 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7215 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
7218 tail call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
7222 declare void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, i32)
7223 declare void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
7225 define void @test_vsuxseg7_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
7226 ; CHECK-LABEL: test_vsuxseg7_nxv2i16_nxv2i16:
7227 ; CHECK: # %bb.0: # %entry
7228 ; CHECK-NEXT: vmv1r.v v10, v8
7229 ; CHECK-NEXT: vmv1r.v v11, v8
7230 ; CHECK-NEXT: vmv1r.v v12, v8
7231 ; CHECK-NEXT: vmv1r.v v13, v8
7232 ; CHECK-NEXT: vmv1r.v v14, v8
7233 ; CHECK-NEXT: vmv1r.v v15, v8
7234 ; CHECK-NEXT: vmv1r.v v16, v8
7235 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7236 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
7239 tail call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
7243 define void @test_vsuxseg7_mask_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7244 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2i16_nxv2i16:
7245 ; CHECK: # %bb.0: # %entry
7246 ; CHECK-NEXT: vmv1r.v v10, v8
7247 ; CHECK-NEXT: vmv1r.v v11, v8
7248 ; CHECK-NEXT: vmv1r.v v12, v8
7249 ; CHECK-NEXT: vmv1r.v v13, v8
7250 ; CHECK-NEXT: vmv1r.v v14, v8
7251 ; CHECK-NEXT: vmv1r.v v15, v8
7252 ; CHECK-NEXT: vmv1r.v v16, v8
7253 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7254 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
7257 tail call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
7261 declare void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, i32)
7262 declare void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i32(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
7264 define void @test_vsuxseg8_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
7265 ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i32:
7266 ; CHECK: # %bb.0: # %entry
7267 ; CHECK-NEXT: vmv1r.v v10, v8
7268 ; CHECK-NEXT: vmv1r.v v11, v8
7269 ; CHECK-NEXT: vmv1r.v v12, v8
7270 ; CHECK-NEXT: vmv1r.v v13, v8
7271 ; CHECK-NEXT: vmv1r.v v14, v8
7272 ; CHECK-NEXT: vmv1r.v v15, v8
7273 ; CHECK-NEXT: vmv1r.v v16, v8
7274 ; CHECK-NEXT: vmv1r.v v17, v8
7275 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7276 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
7279 tail call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
7283 define void @test_vsuxseg8_mask_nxv2i16_nxv2i32(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7284 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i32:
7285 ; CHECK: # %bb.0: # %entry
7286 ; CHECK-NEXT: vmv1r.v v10, v8
7287 ; CHECK-NEXT: vmv1r.v v11, v8
7288 ; CHECK-NEXT: vmv1r.v v12, v8
7289 ; CHECK-NEXT: vmv1r.v v13, v8
7290 ; CHECK-NEXT: vmv1r.v v14, v8
7291 ; CHECK-NEXT: vmv1r.v v15, v8
7292 ; CHECK-NEXT: vmv1r.v v16, v8
7293 ; CHECK-NEXT: vmv1r.v v17, v8
7294 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7295 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
7298 tail call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i32(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
7302 declare void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, i32)
7303 declare void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i8(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
7305 define void @test_vsuxseg8_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
7306 ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i8:
7307 ; CHECK: # %bb.0: # %entry
7308 ; CHECK-NEXT: vmv1r.v v10, v8
7309 ; CHECK-NEXT: vmv1r.v v11, v8
7310 ; CHECK-NEXT: vmv1r.v v12, v8
7311 ; CHECK-NEXT: vmv1r.v v13, v8
7312 ; CHECK-NEXT: vmv1r.v v14, v8
7313 ; CHECK-NEXT: vmv1r.v v15, v8
7314 ; CHECK-NEXT: vmv1r.v v16, v8
7315 ; CHECK-NEXT: vmv1r.v v17, v8
7316 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7317 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
7320 tail call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
7324 define void @test_vsuxseg8_mask_nxv2i16_nxv2i8(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7325 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i8:
7326 ; CHECK: # %bb.0: # %entry
7327 ; CHECK-NEXT: vmv1r.v v10, v8
7328 ; CHECK-NEXT: vmv1r.v v11, v8
7329 ; CHECK-NEXT: vmv1r.v v12, v8
7330 ; CHECK-NEXT: vmv1r.v v13, v8
7331 ; CHECK-NEXT: vmv1r.v v14, v8
7332 ; CHECK-NEXT: vmv1r.v v15, v8
7333 ; CHECK-NEXT: vmv1r.v v16, v8
7334 ; CHECK-NEXT: vmv1r.v v17, v8
7335 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7336 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
7339 tail call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i8(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
7343 declare void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, i32)
7344 declare void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i16(<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>,<vscale x 2 x i16>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
7346 define void @test_vsuxseg8_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
7347 ; CHECK-LABEL: test_vsuxseg8_nxv2i16_nxv2i16:
7348 ; CHECK: # %bb.0: # %entry
7349 ; CHECK-NEXT: vmv1r.v v10, v8
7350 ; CHECK-NEXT: vmv1r.v v11, v8
7351 ; CHECK-NEXT: vmv1r.v v12, v8
7352 ; CHECK-NEXT: vmv1r.v v13, v8
7353 ; CHECK-NEXT: vmv1r.v v14, v8
7354 ; CHECK-NEXT: vmv1r.v v15, v8
7355 ; CHECK-NEXT: vmv1r.v v16, v8
7356 ; CHECK-NEXT: vmv1r.v v17, v8
7357 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7358 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
7361 tail call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
7365 define void @test_vsuxseg8_mask_nxv2i16_nxv2i16(<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
7366 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2i16_nxv2i16:
7367 ; CHECK: # %bb.0: # %entry
7368 ; CHECK-NEXT: vmv1r.v v10, v8
7369 ; CHECK-NEXT: vmv1r.v v11, v8
7370 ; CHECK-NEXT: vmv1r.v v12, v8
7371 ; CHECK-NEXT: vmv1r.v v13, v8
7372 ; CHECK-NEXT: vmv1r.v v14, v8
7373 ; CHECK-NEXT: vmv1r.v v15, v8
7374 ; CHECK-NEXT: vmv1r.v v16, v8
7375 ; CHECK-NEXT: vmv1r.v v17, v8
7376 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
7377 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
7380 tail call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i16(<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val,<vscale x 2 x i16> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
7384 declare void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i16>, i32)
7385 declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
7387 define void @test_vsuxseg2_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
7388 ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i16:
7389 ; CHECK: # %bb.0: # %entry
7390 ; CHECK-NEXT: vmv1r.v v12, v10
7391 ; CHECK-NEXT: vmv2r.v v10, v8
7392 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7393 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
7396 tail call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
7400 define void @test_vsuxseg2_mask_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7401 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i16:
7402 ; CHECK: # %bb.0: # %entry
7403 ; CHECK-NEXT: vmv1r.v v12, v10
7404 ; CHECK-NEXT: vmv2r.v v10, v8
7405 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7406 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
7409 tail call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
7413 declare void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i8>, i32)
7414 declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
7416 define void @test_vsuxseg2_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
7417 ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i8:
7418 ; CHECK: # %bb.0: # %entry
7419 ; CHECK-NEXT: vmv1r.v v12, v10
7420 ; CHECK-NEXT: vmv2r.v v10, v8
7421 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7422 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12
7425 tail call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
7429 define void @test_vsuxseg2_mask_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7430 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i8:
7431 ; CHECK: # %bb.0: # %entry
7432 ; CHECK-NEXT: vmv1r.v v12, v10
7433 ; CHECK-NEXT: vmv2r.v v10, v8
7434 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7435 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12, v0.t
7438 tail call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
7442 declare void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i32>, i32)
7443 declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
7445 define void @test_vsuxseg2_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
7446 ; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i32:
7447 ; CHECK: # %bb.0: # %entry
7448 ; CHECK-NEXT: vmv2r.v v12, v10
7449 ; CHECK-NEXT: vmv2r.v v10, v8
7450 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7451 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
7454 tail call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
7458 define void @test_vsuxseg2_mask_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7459 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i32:
7460 ; CHECK: # %bb.0: # %entry
7461 ; CHECK-NEXT: vmv2r.v v12, v10
7462 ; CHECK-NEXT: vmv2r.v v10, v8
7463 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7464 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
7467 tail call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
7471 declare void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i16>, i32)
7472 declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
7474 define void @test_vsuxseg3_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
7475 ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i16:
7476 ; CHECK: # %bb.0: # %entry
7477 ; CHECK-NEXT: vmv2r.v v12, v8
7478 ; CHECK-NEXT: vmv2r.v v14, v8
7479 ; CHECK-NEXT: vmv2r.v v16, v8
7480 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7481 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
7484 tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
7488 define void @test_vsuxseg3_mask_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7489 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i16:
7490 ; CHECK: # %bb.0: # %entry
7491 ; CHECK-NEXT: vmv2r.v v12, v8
7492 ; CHECK-NEXT: vmv2r.v v14, v8
7493 ; CHECK-NEXT: vmv2r.v v16, v8
7494 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7495 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
7498 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
7502 declare void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i8>, i32)
7503 declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
7505 define void @test_vsuxseg3_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
7506 ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i8:
7507 ; CHECK: # %bb.0: # %entry
7508 ; CHECK-NEXT: vmv2r.v v12, v8
7509 ; CHECK-NEXT: vmv2r.v v14, v8
7510 ; CHECK-NEXT: vmv2r.v v16, v8
7511 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7512 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
7515 tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
7519 define void @test_vsuxseg3_mask_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7520 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i8:
7521 ; CHECK: # %bb.0: # %entry
7522 ; CHECK-NEXT: vmv2r.v v12, v8
7523 ; CHECK-NEXT: vmv2r.v v14, v8
7524 ; CHECK-NEXT: vmv2r.v v16, v8
7525 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7526 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
7529 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
7533 declare void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i32>, i32)
7534 declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
7536 define void @test_vsuxseg3_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
7537 ; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i32:
7538 ; CHECK: # %bb.0: # %entry
7539 ; CHECK-NEXT: vmv2r.v v12, v8
7540 ; CHECK-NEXT: vmv2r.v v14, v8
7541 ; CHECK-NEXT: vmv2r.v v16, v8
7542 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7543 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
7546 tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
7550 define void @test_vsuxseg3_mask_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7551 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i32:
7552 ; CHECK: # %bb.0: # %entry
7553 ; CHECK-NEXT: vmv2r.v v12, v8
7554 ; CHECK-NEXT: vmv2r.v v14, v8
7555 ; CHECK-NEXT: vmv2r.v v16, v8
7556 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7557 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
7560 tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
7564 declare void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i16>, i32)
7565 declare void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
7567 define void @test_vsuxseg4_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
7568 ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i16:
7569 ; CHECK: # %bb.0: # %entry
7570 ; CHECK-NEXT: vmv2r.v v12, v8
7571 ; CHECK-NEXT: vmv2r.v v14, v8
7572 ; CHECK-NEXT: vmv2r.v v16, v8
7573 ; CHECK-NEXT: vmv2r.v v18, v8
7574 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7575 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
7578 tail call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
7582 define void @test_vsuxseg4_mask_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7583 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i16:
7584 ; CHECK: # %bb.0: # %entry
7585 ; CHECK-NEXT: vmv2r.v v12, v8
7586 ; CHECK-NEXT: vmv2r.v v14, v8
7587 ; CHECK-NEXT: vmv2r.v v16, v8
7588 ; CHECK-NEXT: vmv2r.v v18, v8
7589 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7590 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
7593 tail call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
7597 declare void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i8>, i32)
7598 declare void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
7600 define void @test_vsuxseg4_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
7601 ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i8:
7602 ; CHECK: # %bb.0: # %entry
7603 ; CHECK-NEXT: vmv2r.v v12, v8
7604 ; CHECK-NEXT: vmv2r.v v14, v8
7605 ; CHECK-NEXT: vmv2r.v v16, v8
7606 ; CHECK-NEXT: vmv2r.v v18, v8
7607 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7608 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
7611 tail call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
7615 define void @test_vsuxseg4_mask_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7616 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i8:
7617 ; CHECK: # %bb.0: # %entry
7618 ; CHECK-NEXT: vmv2r.v v12, v8
7619 ; CHECK-NEXT: vmv2r.v v14, v8
7620 ; CHECK-NEXT: vmv2r.v v16, v8
7621 ; CHECK-NEXT: vmv2r.v v18, v8
7622 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7623 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
7626 tail call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
7630 declare void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i32>, i32)
7631 declare void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
7633 define void @test_vsuxseg4_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
7634 ; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i32:
7635 ; CHECK: # %bb.0: # %entry
7636 ; CHECK-NEXT: vmv2r.v v12, v8
7637 ; CHECK-NEXT: vmv2r.v v14, v8
7638 ; CHECK-NEXT: vmv2r.v v16, v8
7639 ; CHECK-NEXT: vmv2r.v v18, v8
7640 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7641 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
7644 tail call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
7648 define void @test_vsuxseg4_mask_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7649 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i32:
7650 ; CHECK: # %bb.0: # %entry
7651 ; CHECK-NEXT: vmv2r.v v12, v8
7652 ; CHECK-NEXT: vmv2r.v v14, v8
7653 ; CHECK-NEXT: vmv2r.v v16, v8
7654 ; CHECK-NEXT: vmv2r.v v18, v8
7655 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
7656 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
7659 tail call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
7663 declare void @llvm.riscv.vsuxseg2.nxv16f16.nxv16i16(<vscale x 16 x half>,<vscale x 16 x half>, ptr, <vscale x 16 x i16>, i32)
7664 declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i16(<vscale x 16 x half>,<vscale x 16 x half>, ptr, <vscale x 16 x i16>, <vscale x 16 x i1>, i32)
7666 define void @test_vsuxseg2_nxv16f16_nxv16i16(<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl) {
7667 ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i16:
7668 ; CHECK: # %bb.0: # %entry
7669 ; CHECK-NEXT: vmv4r.v v16, v12
7670 ; CHECK-NEXT: vmv4r.v v12, v8
7671 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
7672 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16
7675 tail call void @llvm.riscv.vsuxseg2.nxv16f16.nxv16i16(<vscale x 16 x half> %val,<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i16> %index, i32 %vl)
7679 define void @test_vsuxseg2_mask_nxv16f16_nxv16i16(<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl) {
7680 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i16:
7681 ; CHECK: # %bb.0: # %entry
7682 ; CHECK-NEXT: vmv4r.v v16, v12
7683 ; CHECK-NEXT: vmv4r.v v12, v8
7684 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
7685 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16, v0.t
7688 tail call void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i16(<vscale x 16 x half> %val,<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i32 %vl)
7692 declare void @llvm.riscv.vsuxseg2.nxv16f16.nxv16i8(<vscale x 16 x half>,<vscale x 16 x half>, ptr, <vscale x 16 x i8>, i32)
7693 declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i8(<vscale x 16 x half>,<vscale x 16 x half>, ptr, <vscale x 16 x i8>, <vscale x 16 x i1>, i32)
7695 define void @test_vsuxseg2_nxv16f16_nxv16i8(<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl) {
7696 ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i8:
7697 ; CHECK: # %bb.0: # %entry
7698 ; CHECK-NEXT: vmv2r.v v16, v12
7699 ; CHECK-NEXT: vmv4r.v v12, v8
7700 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
7701 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16
7704 tail call void @llvm.riscv.vsuxseg2.nxv16f16.nxv16i8(<vscale x 16 x half> %val,<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i8> %index, i32 %vl)
7708 define void @test_vsuxseg2_mask_nxv16f16_nxv16i8(<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl) {
7709 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i8:
7710 ; CHECK: # %bb.0: # %entry
7711 ; CHECK-NEXT: vmv2r.v v16, v12
7712 ; CHECK-NEXT: vmv4r.v v12, v8
7713 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
7714 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16, v0.t
7717 tail call void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i8(<vscale x 16 x half> %val,<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i32 %vl)
7721 declare void @llvm.riscv.vsuxseg2.nxv16f16.nxv16i32(<vscale x 16 x half>,<vscale x 16 x half>, ptr, <vscale x 16 x i32>, i32)
7722 declare void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i32(<vscale x 16 x half>,<vscale x 16 x half>, ptr, <vscale x 16 x i32>, <vscale x 16 x i1>, i32)
7724 define void @test_vsuxseg2_nxv16f16_nxv16i32(<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl) {
7725 ; CHECK-LABEL: test_vsuxseg2_nxv16f16_nxv16i32:
7726 ; CHECK: # %bb.0: # %entry
7727 ; CHECK-NEXT: vmv4r.v v12, v8
7728 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
7729 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
7732 tail call void @llvm.riscv.vsuxseg2.nxv16f16.nxv16i32(<vscale x 16 x half> %val,<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i32> %index, i32 %vl)
7736 define void @test_vsuxseg2_mask_nxv16f16_nxv16i32(<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl) {
7737 ; CHECK-LABEL: test_vsuxseg2_mask_nxv16f16_nxv16i32:
7738 ; CHECK: # %bb.0: # %entry
7739 ; CHECK-NEXT: vmv4r.v v12, v8
7740 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
7741 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
7744 tail call void @llvm.riscv.vsuxseg2.mask.nxv16f16.nxv16i32(<vscale x 16 x half> %val,<vscale x 16 x half> %val, ptr %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i32 %vl)
7748 declare void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i16(<vscale x 4 x double>,<vscale x 4 x double>, ptr, <vscale x 4 x i16>, i32)
7749 declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i16(<vscale x 4 x double>,<vscale x 4 x double>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
7751 define void @test_vsuxseg2_nxv4f64_nxv4i16(<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
7752 ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i16:
7753 ; CHECK: # %bb.0: # %entry
7754 ; CHECK-NEXT: vmv1r.v v16, v12
7755 ; CHECK-NEXT: vmv4r.v v12, v8
7756 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
7757 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16
7760 tail call void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i16(<vscale x 4 x double> %val,<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
7764 define void @test_vsuxseg2_mask_nxv4f64_nxv4i16(<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7765 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i16:
7766 ; CHECK: # %bb.0: # %entry
7767 ; CHECK-NEXT: vmv1r.v v16, v12
7768 ; CHECK-NEXT: vmv4r.v v12, v8
7769 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
7770 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16, v0.t
7773 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i16(<vscale x 4 x double> %val,<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
7777 declare void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i8(<vscale x 4 x double>,<vscale x 4 x double>, ptr, <vscale x 4 x i8>, i32)
7778 declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i8(<vscale x 4 x double>,<vscale x 4 x double>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
7780 define void @test_vsuxseg2_nxv4f64_nxv4i8(<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
7781 ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i8:
7782 ; CHECK: # %bb.0: # %entry
7783 ; CHECK-NEXT: vmv1r.v v16, v12
7784 ; CHECK-NEXT: vmv4r.v v12, v8
7785 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
7786 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16
7789 tail call void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i8(<vscale x 4 x double> %val,<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
7793 define void @test_vsuxseg2_mask_nxv4f64_nxv4i8(<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7794 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i8:
7795 ; CHECK: # %bb.0: # %entry
7796 ; CHECK-NEXT: vmv1r.v v16, v12
7797 ; CHECK-NEXT: vmv4r.v v12, v8
7798 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
7799 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16, v0.t
7802 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i8(<vscale x 4 x double> %val,<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
7806 declare void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i32(<vscale x 4 x double>,<vscale x 4 x double>, ptr, <vscale x 4 x i32>, i32)
7807 declare void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i32(<vscale x 4 x double>,<vscale x 4 x double>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
7809 define void @test_vsuxseg2_nxv4f64_nxv4i32(<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
7810 ; CHECK-LABEL: test_vsuxseg2_nxv4f64_nxv4i32:
7811 ; CHECK: # %bb.0: # %entry
7812 ; CHECK-NEXT: vmv2r.v v16, v12
7813 ; CHECK-NEXT: vmv4r.v v12, v8
7814 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
7815 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
7818 tail call void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i32(<vscale x 4 x double> %val,<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
7822 define void @test_vsuxseg2_mask_nxv4f64_nxv4i32(<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
7823 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f64_nxv4i32:
7824 ; CHECK: # %bb.0: # %entry
7825 ; CHECK-NEXT: vmv2r.v v16, v12
7826 ; CHECK-NEXT: vmv4r.v v12, v8
7827 ; CHECK-NEXT: vsetvli zero, a1, e64, m4, ta, ma
7828 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
7831 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i32(<vscale x 4 x double> %val,<vscale x 4 x double> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
7835 declare void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, i32)
7836 declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
7838 define void @test_vsuxseg2_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
7839 ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i8:
7840 ; CHECK: # %bb.0: # %entry
7841 ; CHECK-NEXT: vmv1r.v v10, v9
7842 ; CHECK-NEXT: vmv1r.v v9, v8
7843 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7844 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
7847 tail call void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
7851 define void @test_vsuxseg2_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
7852 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i8:
7853 ; CHECK: # %bb.0: # %entry
7854 ; CHECK-NEXT: vmv1r.v v10, v9
7855 ; CHECK-NEXT: vmv1r.v v9, v8
7856 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7857 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
7860 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
7864 declare void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, i32)
7865 declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
7867 define void @test_vsuxseg2_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
7868 ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i32:
7869 ; CHECK: # %bb.0: # %entry
7870 ; CHECK-NEXT: vmv1r.v v10, v9
7871 ; CHECK-NEXT: vmv1r.v v9, v8
7872 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7873 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
7876 tail call void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
7880 define void @test_vsuxseg2_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
7881 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i32:
7882 ; CHECK: # %bb.0: # %entry
7883 ; CHECK-NEXT: vmv1r.v v10, v9
7884 ; CHECK-NEXT: vmv1r.v v9, v8
7885 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7886 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
7889 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
7893 declare void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, i32)
7894 declare void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
7896 define void @test_vsuxseg2_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
7897 ; CHECK-LABEL: test_vsuxseg2_nxv1f64_nxv1i16:
7898 ; CHECK: # %bb.0: # %entry
7899 ; CHECK-NEXT: vmv1r.v v10, v9
7900 ; CHECK-NEXT: vmv1r.v v9, v8
7901 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7902 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
7905 tail call void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
7909 define void @test_vsuxseg2_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
7910 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f64_nxv1i16:
7911 ; CHECK: # %bb.0: # %entry
7912 ; CHECK-NEXT: vmv1r.v v10, v9
7913 ; CHECK-NEXT: vmv1r.v v9, v8
7914 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7915 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
7918 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
7922 declare void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, i32)
7923 declare void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
7925 define void @test_vsuxseg3_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
7926 ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i8:
7927 ; CHECK: # %bb.0: # %entry
7928 ; CHECK-NEXT: vmv1r.v v10, v8
7929 ; CHECK-NEXT: vmv1r.v v11, v8
7930 ; CHECK-NEXT: vmv1r.v v12, v8
7931 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7932 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
7935 tail call void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
7939 define void @test_vsuxseg3_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
7940 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i8:
7941 ; CHECK: # %bb.0: # %entry
7942 ; CHECK-NEXT: vmv1r.v v10, v8
7943 ; CHECK-NEXT: vmv1r.v v11, v8
7944 ; CHECK-NEXT: vmv1r.v v12, v8
7945 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7946 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
7949 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
7953 declare void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, i32)
7954 declare void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
7956 define void @test_vsuxseg3_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
7957 ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i32:
7958 ; CHECK: # %bb.0: # %entry
7959 ; CHECK-NEXT: vmv1r.v v10, v8
7960 ; CHECK-NEXT: vmv1r.v v11, v8
7961 ; CHECK-NEXT: vmv1r.v v12, v8
7962 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7963 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
7966 tail call void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
7970 define void @test_vsuxseg3_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
7971 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i32:
7972 ; CHECK: # %bb.0: # %entry
7973 ; CHECK-NEXT: vmv1r.v v10, v8
7974 ; CHECK-NEXT: vmv1r.v v11, v8
7975 ; CHECK-NEXT: vmv1r.v v12, v8
7976 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7977 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
7980 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
7984 declare void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, i32)
7985 declare void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
7987 define void @test_vsuxseg3_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
7988 ; CHECK-LABEL: test_vsuxseg3_nxv1f64_nxv1i16:
7989 ; CHECK: # %bb.0: # %entry
7990 ; CHECK-NEXT: vmv1r.v v10, v8
7991 ; CHECK-NEXT: vmv1r.v v11, v8
7992 ; CHECK-NEXT: vmv1r.v v12, v8
7993 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
7994 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
7997 tail call void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
8001 define void @test_vsuxseg3_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8002 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f64_nxv1i16:
8003 ; CHECK: # %bb.0: # %entry
8004 ; CHECK-NEXT: vmv1r.v v10, v8
8005 ; CHECK-NEXT: vmv1r.v v11, v8
8006 ; CHECK-NEXT: vmv1r.v v12, v8
8007 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8008 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
8011 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
8015 declare void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, i32)
8016 declare void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
8018 define void @test_vsuxseg4_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
8019 ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i8:
8020 ; CHECK: # %bb.0: # %entry
8021 ; CHECK-NEXT: vmv1r.v v10, v8
8022 ; CHECK-NEXT: vmv1r.v v11, v8
8023 ; CHECK-NEXT: vmv1r.v v12, v8
8024 ; CHECK-NEXT: vmv1r.v v13, v8
8025 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8026 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
8029 tail call void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
8033 define void @test_vsuxseg4_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8034 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i8:
8035 ; CHECK: # %bb.0: # %entry
8036 ; CHECK-NEXT: vmv1r.v v10, v8
8037 ; CHECK-NEXT: vmv1r.v v11, v8
8038 ; CHECK-NEXT: vmv1r.v v12, v8
8039 ; CHECK-NEXT: vmv1r.v v13, v8
8040 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8041 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
8044 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
8048 declare void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, i32)
8049 declare void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
8051 define void @test_vsuxseg4_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
8052 ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i32:
8053 ; CHECK: # %bb.0: # %entry
8054 ; CHECK-NEXT: vmv1r.v v10, v8
8055 ; CHECK-NEXT: vmv1r.v v11, v8
8056 ; CHECK-NEXT: vmv1r.v v12, v8
8057 ; CHECK-NEXT: vmv1r.v v13, v8
8058 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8059 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
8062 tail call void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
8066 define void @test_vsuxseg4_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8067 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i32:
8068 ; CHECK: # %bb.0: # %entry
8069 ; CHECK-NEXT: vmv1r.v v10, v8
8070 ; CHECK-NEXT: vmv1r.v v11, v8
8071 ; CHECK-NEXT: vmv1r.v v12, v8
8072 ; CHECK-NEXT: vmv1r.v v13, v8
8073 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8074 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
8077 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
8081 declare void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, i32)
8082 declare void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
8084 define void @test_vsuxseg4_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
8085 ; CHECK-LABEL: test_vsuxseg4_nxv1f64_nxv1i16:
8086 ; CHECK: # %bb.0: # %entry
8087 ; CHECK-NEXT: vmv1r.v v10, v8
8088 ; CHECK-NEXT: vmv1r.v v11, v8
8089 ; CHECK-NEXT: vmv1r.v v12, v8
8090 ; CHECK-NEXT: vmv1r.v v13, v8
8091 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8092 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
8095 tail call void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
8099 define void @test_vsuxseg4_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8100 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f64_nxv1i16:
8101 ; CHECK: # %bb.0: # %entry
8102 ; CHECK-NEXT: vmv1r.v v10, v8
8103 ; CHECK-NEXT: vmv1r.v v11, v8
8104 ; CHECK-NEXT: vmv1r.v v12, v8
8105 ; CHECK-NEXT: vmv1r.v v13, v8
8106 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8107 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
8110 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
8114 declare void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, i32)
8115 declare void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
8117 define void @test_vsuxseg5_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
8118 ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i8:
8119 ; CHECK: # %bb.0: # %entry
8120 ; CHECK-NEXT: vmv1r.v v10, v8
8121 ; CHECK-NEXT: vmv1r.v v11, v8
8122 ; CHECK-NEXT: vmv1r.v v12, v8
8123 ; CHECK-NEXT: vmv1r.v v13, v8
8124 ; CHECK-NEXT: vmv1r.v v14, v8
8125 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8126 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
8129 tail call void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
8133 define void @test_vsuxseg5_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8134 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i8:
8135 ; CHECK: # %bb.0: # %entry
8136 ; CHECK-NEXT: vmv1r.v v10, v8
8137 ; CHECK-NEXT: vmv1r.v v11, v8
8138 ; CHECK-NEXT: vmv1r.v v12, v8
8139 ; CHECK-NEXT: vmv1r.v v13, v8
8140 ; CHECK-NEXT: vmv1r.v v14, v8
8141 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8142 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
8145 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
8149 declare void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, i32)
8150 declare void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
8152 define void @test_vsuxseg5_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
8153 ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i32:
8154 ; CHECK: # %bb.0: # %entry
8155 ; CHECK-NEXT: vmv1r.v v10, v8
8156 ; CHECK-NEXT: vmv1r.v v11, v8
8157 ; CHECK-NEXT: vmv1r.v v12, v8
8158 ; CHECK-NEXT: vmv1r.v v13, v8
8159 ; CHECK-NEXT: vmv1r.v v14, v8
8160 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8161 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
8164 tail call void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
8168 define void @test_vsuxseg5_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8169 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i32:
8170 ; CHECK: # %bb.0: # %entry
8171 ; CHECK-NEXT: vmv1r.v v10, v8
8172 ; CHECK-NEXT: vmv1r.v v11, v8
8173 ; CHECK-NEXT: vmv1r.v v12, v8
8174 ; CHECK-NEXT: vmv1r.v v13, v8
8175 ; CHECK-NEXT: vmv1r.v v14, v8
8176 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8177 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
8180 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
8184 declare void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, i32)
8185 declare void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
8187 define void @test_vsuxseg5_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
8188 ; CHECK-LABEL: test_vsuxseg5_nxv1f64_nxv1i16:
8189 ; CHECK: # %bb.0: # %entry
8190 ; CHECK-NEXT: vmv1r.v v10, v8
8191 ; CHECK-NEXT: vmv1r.v v11, v8
8192 ; CHECK-NEXT: vmv1r.v v12, v8
8193 ; CHECK-NEXT: vmv1r.v v13, v8
8194 ; CHECK-NEXT: vmv1r.v v14, v8
8195 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8196 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
8199 tail call void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
8203 define void @test_vsuxseg5_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8204 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f64_nxv1i16:
8205 ; CHECK: # %bb.0: # %entry
8206 ; CHECK-NEXT: vmv1r.v v10, v8
8207 ; CHECK-NEXT: vmv1r.v v11, v8
8208 ; CHECK-NEXT: vmv1r.v v12, v8
8209 ; CHECK-NEXT: vmv1r.v v13, v8
8210 ; CHECK-NEXT: vmv1r.v v14, v8
8211 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8212 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
8215 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
8219 declare void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, i32)
8220 declare void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
8222 define void @test_vsuxseg6_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
8223 ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i8:
8224 ; CHECK: # %bb.0: # %entry
8225 ; CHECK-NEXT: vmv1r.v v10, v8
8226 ; CHECK-NEXT: vmv1r.v v11, v8
8227 ; CHECK-NEXT: vmv1r.v v12, v8
8228 ; CHECK-NEXT: vmv1r.v v13, v8
8229 ; CHECK-NEXT: vmv1r.v v14, v8
8230 ; CHECK-NEXT: vmv1r.v v15, v8
8231 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8232 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
8235 tail call void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
8239 define void @test_vsuxseg6_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8240 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i8:
8241 ; CHECK: # %bb.0: # %entry
8242 ; CHECK-NEXT: vmv1r.v v10, v8
8243 ; CHECK-NEXT: vmv1r.v v11, v8
8244 ; CHECK-NEXT: vmv1r.v v12, v8
8245 ; CHECK-NEXT: vmv1r.v v13, v8
8246 ; CHECK-NEXT: vmv1r.v v14, v8
8247 ; CHECK-NEXT: vmv1r.v v15, v8
8248 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8249 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
8252 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
8256 declare void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, i32)
8257 declare void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
8259 define void @test_vsuxseg6_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
8260 ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i32:
8261 ; CHECK: # %bb.0: # %entry
8262 ; CHECK-NEXT: vmv1r.v v10, v8
8263 ; CHECK-NEXT: vmv1r.v v11, v8
8264 ; CHECK-NEXT: vmv1r.v v12, v8
8265 ; CHECK-NEXT: vmv1r.v v13, v8
8266 ; CHECK-NEXT: vmv1r.v v14, v8
8267 ; CHECK-NEXT: vmv1r.v v15, v8
8268 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8269 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
8272 tail call void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
8276 define void @test_vsuxseg6_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8277 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i32:
8278 ; CHECK: # %bb.0: # %entry
8279 ; CHECK-NEXT: vmv1r.v v10, v8
8280 ; CHECK-NEXT: vmv1r.v v11, v8
8281 ; CHECK-NEXT: vmv1r.v v12, v8
8282 ; CHECK-NEXT: vmv1r.v v13, v8
8283 ; CHECK-NEXT: vmv1r.v v14, v8
8284 ; CHECK-NEXT: vmv1r.v v15, v8
8285 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8286 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
8289 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
8293 declare void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, i32)
8294 declare void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
8296 define void @test_vsuxseg6_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
8297 ; CHECK-LABEL: test_vsuxseg6_nxv1f64_nxv1i16:
8298 ; CHECK: # %bb.0: # %entry
8299 ; CHECK-NEXT: vmv1r.v v10, v8
8300 ; CHECK-NEXT: vmv1r.v v11, v8
8301 ; CHECK-NEXT: vmv1r.v v12, v8
8302 ; CHECK-NEXT: vmv1r.v v13, v8
8303 ; CHECK-NEXT: vmv1r.v v14, v8
8304 ; CHECK-NEXT: vmv1r.v v15, v8
8305 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8306 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
8309 tail call void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
8313 define void @test_vsuxseg6_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8314 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f64_nxv1i16:
8315 ; CHECK: # %bb.0: # %entry
8316 ; CHECK-NEXT: vmv1r.v v10, v8
8317 ; CHECK-NEXT: vmv1r.v v11, v8
8318 ; CHECK-NEXT: vmv1r.v v12, v8
8319 ; CHECK-NEXT: vmv1r.v v13, v8
8320 ; CHECK-NEXT: vmv1r.v v14, v8
8321 ; CHECK-NEXT: vmv1r.v v15, v8
8322 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8323 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
8326 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
8330 declare void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, i32)
8331 declare void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
8333 define void @test_vsuxseg7_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
8334 ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i8:
8335 ; CHECK: # %bb.0: # %entry
8336 ; CHECK-NEXT: vmv1r.v v10, v8
8337 ; CHECK-NEXT: vmv1r.v v11, v8
8338 ; CHECK-NEXT: vmv1r.v v12, v8
8339 ; CHECK-NEXT: vmv1r.v v13, v8
8340 ; CHECK-NEXT: vmv1r.v v14, v8
8341 ; CHECK-NEXT: vmv1r.v v15, v8
8342 ; CHECK-NEXT: vmv1r.v v16, v8
8343 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8344 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
8347 tail call void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
8351 define void @test_vsuxseg7_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8352 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i8:
8353 ; CHECK: # %bb.0: # %entry
8354 ; CHECK-NEXT: vmv1r.v v10, v8
8355 ; CHECK-NEXT: vmv1r.v v11, v8
8356 ; CHECK-NEXT: vmv1r.v v12, v8
8357 ; CHECK-NEXT: vmv1r.v v13, v8
8358 ; CHECK-NEXT: vmv1r.v v14, v8
8359 ; CHECK-NEXT: vmv1r.v v15, v8
8360 ; CHECK-NEXT: vmv1r.v v16, v8
8361 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8362 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
8365 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
8369 declare void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, i32)
8370 declare void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
8372 define void @test_vsuxseg7_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
8373 ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i32:
8374 ; CHECK: # %bb.0: # %entry
8375 ; CHECK-NEXT: vmv1r.v v10, v8
8376 ; CHECK-NEXT: vmv1r.v v11, v8
8377 ; CHECK-NEXT: vmv1r.v v12, v8
8378 ; CHECK-NEXT: vmv1r.v v13, v8
8379 ; CHECK-NEXT: vmv1r.v v14, v8
8380 ; CHECK-NEXT: vmv1r.v v15, v8
8381 ; CHECK-NEXT: vmv1r.v v16, v8
8382 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8383 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
8386 tail call void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
8390 define void @test_vsuxseg7_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8391 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i32:
8392 ; CHECK: # %bb.0: # %entry
8393 ; CHECK-NEXT: vmv1r.v v10, v8
8394 ; CHECK-NEXT: vmv1r.v v11, v8
8395 ; CHECK-NEXT: vmv1r.v v12, v8
8396 ; CHECK-NEXT: vmv1r.v v13, v8
8397 ; CHECK-NEXT: vmv1r.v v14, v8
8398 ; CHECK-NEXT: vmv1r.v v15, v8
8399 ; CHECK-NEXT: vmv1r.v v16, v8
8400 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8401 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
8404 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
8408 declare void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, i32)
8409 declare void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
8411 define void @test_vsuxseg7_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
8412 ; CHECK-LABEL: test_vsuxseg7_nxv1f64_nxv1i16:
8413 ; CHECK: # %bb.0: # %entry
8414 ; CHECK-NEXT: vmv1r.v v10, v8
8415 ; CHECK-NEXT: vmv1r.v v11, v8
8416 ; CHECK-NEXT: vmv1r.v v12, v8
8417 ; CHECK-NEXT: vmv1r.v v13, v8
8418 ; CHECK-NEXT: vmv1r.v v14, v8
8419 ; CHECK-NEXT: vmv1r.v v15, v8
8420 ; CHECK-NEXT: vmv1r.v v16, v8
8421 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8422 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
8425 tail call void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
8429 define void @test_vsuxseg7_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8430 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f64_nxv1i16:
8431 ; CHECK: # %bb.0: # %entry
8432 ; CHECK-NEXT: vmv1r.v v10, v8
8433 ; CHECK-NEXT: vmv1r.v v11, v8
8434 ; CHECK-NEXT: vmv1r.v v12, v8
8435 ; CHECK-NEXT: vmv1r.v v13, v8
8436 ; CHECK-NEXT: vmv1r.v v14, v8
8437 ; CHECK-NEXT: vmv1r.v v15, v8
8438 ; CHECK-NEXT: vmv1r.v v16, v8
8439 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8440 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
8443 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
8447 declare void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, i32)
8448 declare void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i8(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
8450 define void @test_vsuxseg8_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
8451 ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i8:
8452 ; CHECK: # %bb.0: # %entry
8453 ; CHECK-NEXT: vmv1r.v v10, v8
8454 ; CHECK-NEXT: vmv1r.v v11, v8
8455 ; CHECK-NEXT: vmv1r.v v12, v8
8456 ; CHECK-NEXT: vmv1r.v v13, v8
8457 ; CHECK-NEXT: vmv1r.v v14, v8
8458 ; CHECK-NEXT: vmv1r.v v15, v8
8459 ; CHECK-NEXT: vmv1r.v v16, v8
8460 ; CHECK-NEXT: vmv1r.v v17, v8
8461 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8462 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
8465 tail call void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
8469 define void @test_vsuxseg8_mask_nxv1f64_nxv1i8(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8470 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i8:
8471 ; CHECK: # %bb.0: # %entry
8472 ; CHECK-NEXT: vmv1r.v v10, v8
8473 ; CHECK-NEXT: vmv1r.v v11, v8
8474 ; CHECK-NEXT: vmv1r.v v12, v8
8475 ; CHECK-NEXT: vmv1r.v v13, v8
8476 ; CHECK-NEXT: vmv1r.v v14, v8
8477 ; CHECK-NEXT: vmv1r.v v15, v8
8478 ; CHECK-NEXT: vmv1r.v v16, v8
8479 ; CHECK-NEXT: vmv1r.v v17, v8
8480 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8481 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
8484 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i8(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
8488 declare void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, i32)
8489 declare void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i32(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
8491 define void @test_vsuxseg8_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
8492 ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i32:
8493 ; CHECK: # %bb.0: # %entry
8494 ; CHECK-NEXT: vmv1r.v v10, v8
8495 ; CHECK-NEXT: vmv1r.v v11, v8
8496 ; CHECK-NEXT: vmv1r.v v12, v8
8497 ; CHECK-NEXT: vmv1r.v v13, v8
8498 ; CHECK-NEXT: vmv1r.v v14, v8
8499 ; CHECK-NEXT: vmv1r.v v15, v8
8500 ; CHECK-NEXT: vmv1r.v v16, v8
8501 ; CHECK-NEXT: vmv1r.v v17, v8
8502 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8503 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
8506 tail call void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
; vsuxseg8: 8-field indexed-unordered segment stores of nxv1f64 values.
; Each test passes the same %val for all eight segment fields; the expected
; assembly therefore broadcasts v8 into the contiguous register group
; v10-v17 with vmv1r copies before issuing the store.  The assertion
; comments were autogenerated (see file header) -- regenerate with
; update_llc_test_checks.py instead of hand-editing them.
;
; Masked variant with an i32 index vector (v9); v0.t carries the mask.
8510 define void @test_vsuxseg8_mask_nxv1f64_nxv1i32(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8511 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i32:
8512 ; CHECK: # %bb.0: # %entry
8513 ; CHECK-NEXT: vmv1r.v v10, v8
8514 ; CHECK-NEXT: vmv1r.v v11, v8
8515 ; CHECK-NEXT: vmv1r.v v12, v8
8516 ; CHECK-NEXT: vmv1r.v v13, v8
8517 ; CHECK-NEXT: vmv1r.v v14, v8
8518 ; CHECK-NEXT: vmv1r.v v15, v8
8519 ; CHECK-NEXT: vmv1r.v v16, v8
8520 ; CHECK-NEXT: vmv1r.v v17, v8
8521 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8522 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
8525 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i32(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
; i16-indexed flavor: declarations for the unmasked and masked intrinsics.
8529 declare void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, i32)
8530 declare void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i16(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
; Unmasked store with an i16 index vector; selects vsuxseg8ei16.v.
8532 define void @test_vsuxseg8_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
8533 ; CHECK-LABEL: test_vsuxseg8_nxv1f64_nxv1i16:
8534 ; CHECK: # %bb.0: # %entry
8535 ; CHECK-NEXT: vmv1r.v v10, v8
8536 ; CHECK-NEXT: vmv1r.v v11, v8
8537 ; CHECK-NEXT: vmv1r.v v12, v8
8538 ; CHECK-NEXT: vmv1r.v v13, v8
8539 ; CHECK-NEXT: vmv1r.v v14, v8
8540 ; CHECK-NEXT: vmv1r.v v15, v8
8541 ; CHECK-NEXT: vmv1r.v v16, v8
8542 ; CHECK-NEXT: vmv1r.v v17, v8
8543 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8544 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
8547 tail call void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
; Masked i16-indexed variant; identical codegen plus the v0.t mask operand.
8551 define void @test_vsuxseg8_mask_nxv1f64_nxv1i16(<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
8552 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f64_nxv1i16:
8553 ; CHECK: # %bb.0: # %entry
8554 ; CHECK-NEXT: vmv1r.v v10, v8
8555 ; CHECK-NEXT: vmv1r.v v11, v8
8556 ; CHECK-NEXT: vmv1r.v v12, v8
8557 ; CHECK-NEXT: vmv1r.v v13, v8
8558 ; CHECK-NEXT: vmv1r.v v14, v8
8559 ; CHECK-NEXT: vmv1r.v v15, v8
8560 ; CHECK-NEXT: vmv1r.v v16, v8
8561 ; CHECK-NEXT: vmv1r.v v17, v8
8562 ; CHECK-NEXT: vsetvli zero, a1, e64, m1, ta, ma
8563 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
8566 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i16(<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val,<vscale x 1 x double> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
; vsuxseg2: 2-field indexed-unordered segment stores of nxv2f32 values,
; covering i32/i8/i16 index element types, unmasked and masked.  With two
; fields the value group is v8-v9, so codegen first moves the index out of
; v9 into v10 and then duplicates v8 into v9.  Assertions were autogenerated
; by update_llc_test_checks.py -- regenerate rather than hand-edit.
8570 declare void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, i32)
8571 declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
8573 define void @test_vsuxseg2_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
8574 ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i32:
8575 ; CHECK: # %bb.0: # %entry
8576 ; CHECK-NEXT: vmv1r.v v10, v9
8577 ; CHECK-NEXT: vmv1r.v v9, v8
8578 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8579 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
8582 tail call void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
8586 define void @test_vsuxseg2_mask_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8587 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i32:
8588 ; CHECK: # %bb.0: # %entry
8589 ; CHECK-NEXT: vmv1r.v v10, v9
8590 ; CHECK-NEXT: vmv1r.v v9, v8
8591 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8592 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
8595 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i8-indexed flavor.
8599 declare void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, i32)
8600 declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
8602 define void @test_vsuxseg2_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
8603 ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i8:
8604 ; CHECK: # %bb.0: # %entry
8605 ; CHECK-NEXT: vmv1r.v v10, v9
8606 ; CHECK-NEXT: vmv1r.v v9, v8
8607 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8608 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
8611 tail call void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
8615 define void @test_vsuxseg2_mask_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8616 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i8:
8617 ; CHECK: # %bb.0: # %entry
8618 ; CHECK-NEXT: vmv1r.v v10, v9
8619 ; CHECK-NEXT: vmv1r.v v9, v8
8620 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8621 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
8624 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i16-indexed flavor.
8628 declare void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, i32)
8629 declare void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
8631 define void @test_vsuxseg2_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
8632 ; CHECK-LABEL: test_vsuxseg2_nxv2f32_nxv2i16:
8633 ; CHECK: # %bb.0: # %entry
8634 ; CHECK-NEXT: vmv1r.v v10, v9
8635 ; CHECK-NEXT: vmv1r.v v9, v8
8636 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8637 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
8640 tail call void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
8644 define void @test_vsuxseg2_mask_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8645 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f32_nxv2i16:
8646 ; CHECK: # %bb.0: # %entry
8647 ; CHECK-NEXT: vmv1r.v v10, v9
8648 ; CHECK-NEXT: vmv1r.v v9, v8
8649 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8650 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
8653 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
; vsuxseg3: 3-field indexed-unordered segment stores of nxv2f32, for
; i32/i8/i16 index types, unmasked and masked.  The single %val operand is
; duplicated into the v10-v12 value group while the index stays in v9.
; Assertions were autogenerated by update_llc_test_checks.py.
8657 declare void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, i32)
8658 declare void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
8660 define void @test_vsuxseg3_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
8661 ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i32:
8662 ; CHECK: # %bb.0: # %entry
8663 ; CHECK-NEXT: vmv1r.v v10, v8
8664 ; CHECK-NEXT: vmv1r.v v11, v8
8665 ; CHECK-NEXT: vmv1r.v v12, v8
8666 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8667 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
8670 tail call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
8674 define void @test_vsuxseg3_mask_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8675 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i32:
8676 ; CHECK: # %bb.0: # %entry
8677 ; CHECK-NEXT: vmv1r.v v10, v8
8678 ; CHECK-NEXT: vmv1r.v v11, v8
8679 ; CHECK-NEXT: vmv1r.v v12, v8
8680 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8681 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
8684 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i8-indexed flavor.
8688 declare void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, i32)
8689 declare void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
8691 define void @test_vsuxseg3_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
8692 ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i8:
8693 ; CHECK: # %bb.0: # %entry
8694 ; CHECK-NEXT: vmv1r.v v10, v8
8695 ; CHECK-NEXT: vmv1r.v v11, v8
8696 ; CHECK-NEXT: vmv1r.v v12, v8
8697 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8698 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
8701 tail call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
8705 define void @test_vsuxseg3_mask_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8706 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i8:
8707 ; CHECK: # %bb.0: # %entry
8708 ; CHECK-NEXT: vmv1r.v v10, v8
8709 ; CHECK-NEXT: vmv1r.v v11, v8
8710 ; CHECK-NEXT: vmv1r.v v12, v8
8711 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8712 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
8715 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i16-indexed flavor.
8719 declare void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, i32)
8720 declare void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
8722 define void @test_vsuxseg3_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
8723 ; CHECK-LABEL: test_vsuxseg3_nxv2f32_nxv2i16:
8724 ; CHECK: # %bb.0: # %entry
8725 ; CHECK-NEXT: vmv1r.v v10, v8
8726 ; CHECK-NEXT: vmv1r.v v11, v8
8727 ; CHECK-NEXT: vmv1r.v v12, v8
8728 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8729 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
8732 tail call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
8736 define void @test_vsuxseg3_mask_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8737 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f32_nxv2i16:
8738 ; CHECK: # %bb.0: # %entry
8739 ; CHECK-NEXT: vmv1r.v v10, v8
8740 ; CHECK-NEXT: vmv1r.v v11, v8
8741 ; CHECK-NEXT: vmv1r.v v12, v8
8742 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8743 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
8746 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
; vsuxseg4: 4-field indexed-unordered segment stores of nxv2f32, for
; i32/i8/i16 index types, unmasked and masked.  %val is duplicated into the
; v10-v13 value group; index remains in v9.  Assertions autogenerated by
; update_llc_test_checks.py.
8750 declare void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, i32)
8751 declare void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
8753 define void @test_vsuxseg4_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
8754 ; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i32:
8755 ; CHECK: # %bb.0: # %entry
8756 ; CHECK-NEXT: vmv1r.v v10, v8
8757 ; CHECK-NEXT: vmv1r.v v11, v8
8758 ; CHECK-NEXT: vmv1r.v v12, v8
8759 ; CHECK-NEXT: vmv1r.v v13, v8
8760 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8761 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
8764 tail call void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
8768 define void @test_vsuxseg4_mask_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8769 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i32:
8770 ; CHECK: # %bb.0: # %entry
8771 ; CHECK-NEXT: vmv1r.v v10, v8
8772 ; CHECK-NEXT: vmv1r.v v11, v8
8773 ; CHECK-NEXT: vmv1r.v v12, v8
8774 ; CHECK-NEXT: vmv1r.v v13, v8
8775 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8776 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
8779 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i8-indexed flavor.
8783 declare void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, i32)
8784 declare void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
8786 define void @test_vsuxseg4_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
8787 ; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i8:
8788 ; CHECK: # %bb.0: # %entry
8789 ; CHECK-NEXT: vmv1r.v v10, v8
8790 ; CHECK-NEXT: vmv1r.v v11, v8
8791 ; CHECK-NEXT: vmv1r.v v12, v8
8792 ; CHECK-NEXT: vmv1r.v v13, v8
8793 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8794 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
8797 tail call void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
8801 define void @test_vsuxseg4_mask_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8802 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i8:
8803 ; CHECK: # %bb.0: # %entry
8804 ; CHECK-NEXT: vmv1r.v v10, v8
8805 ; CHECK-NEXT: vmv1r.v v11, v8
8806 ; CHECK-NEXT: vmv1r.v v12, v8
8807 ; CHECK-NEXT: vmv1r.v v13, v8
8808 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8809 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
8812 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i16-indexed flavor.
8816 declare void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, i32)
8817 declare void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
8819 define void @test_vsuxseg4_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
8820 ; CHECK-LABEL: test_vsuxseg4_nxv2f32_nxv2i16:
8821 ; CHECK: # %bb.0: # %entry
8822 ; CHECK-NEXT: vmv1r.v v10, v8
8823 ; CHECK-NEXT: vmv1r.v v11, v8
8824 ; CHECK-NEXT: vmv1r.v v12, v8
8825 ; CHECK-NEXT: vmv1r.v v13, v8
8826 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8827 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
8830 tail call void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
8834 define void @test_vsuxseg4_mask_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8835 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f32_nxv2i16:
8836 ; CHECK: # %bb.0: # %entry
8837 ; CHECK-NEXT: vmv1r.v v10, v8
8838 ; CHECK-NEXT: vmv1r.v v11, v8
8839 ; CHECK-NEXT: vmv1r.v v12, v8
8840 ; CHECK-NEXT: vmv1r.v v13, v8
8841 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8842 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
8845 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
; vsuxseg5: 5-field indexed-unordered segment stores of nxv2f32, for
; i32/i8/i16 index types, unmasked and masked.  %val is duplicated into the
; v10-v14 value group; index remains in v9.  Assertions autogenerated by
; update_llc_test_checks.py.
8849 declare void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, i32)
8850 declare void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
8852 define void @test_vsuxseg5_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
8853 ; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i32:
8854 ; CHECK: # %bb.0: # %entry
8855 ; CHECK-NEXT: vmv1r.v v10, v8
8856 ; CHECK-NEXT: vmv1r.v v11, v8
8857 ; CHECK-NEXT: vmv1r.v v12, v8
8858 ; CHECK-NEXT: vmv1r.v v13, v8
8859 ; CHECK-NEXT: vmv1r.v v14, v8
8860 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8861 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
8864 tail call void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
8868 define void @test_vsuxseg5_mask_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8869 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i32:
8870 ; CHECK: # %bb.0: # %entry
8871 ; CHECK-NEXT: vmv1r.v v10, v8
8872 ; CHECK-NEXT: vmv1r.v v11, v8
8873 ; CHECK-NEXT: vmv1r.v v12, v8
8874 ; CHECK-NEXT: vmv1r.v v13, v8
8875 ; CHECK-NEXT: vmv1r.v v14, v8
8876 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8877 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
8880 tail call void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i8-indexed flavor.
8884 declare void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, i32)
8885 declare void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
8887 define void @test_vsuxseg5_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
8888 ; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i8:
8889 ; CHECK: # %bb.0: # %entry
8890 ; CHECK-NEXT: vmv1r.v v10, v8
8891 ; CHECK-NEXT: vmv1r.v v11, v8
8892 ; CHECK-NEXT: vmv1r.v v12, v8
8893 ; CHECK-NEXT: vmv1r.v v13, v8
8894 ; CHECK-NEXT: vmv1r.v v14, v8
8895 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8896 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
8899 tail call void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
8903 define void @test_vsuxseg5_mask_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8904 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i8:
8905 ; CHECK: # %bb.0: # %entry
8906 ; CHECK-NEXT: vmv1r.v v10, v8
8907 ; CHECK-NEXT: vmv1r.v v11, v8
8908 ; CHECK-NEXT: vmv1r.v v12, v8
8909 ; CHECK-NEXT: vmv1r.v v13, v8
8910 ; CHECK-NEXT: vmv1r.v v14, v8
8911 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8912 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
8915 tail call void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i16-indexed flavor.
8919 declare void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, i32)
8920 declare void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
8922 define void @test_vsuxseg5_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
8923 ; CHECK-LABEL: test_vsuxseg5_nxv2f32_nxv2i16:
8924 ; CHECK: # %bb.0: # %entry
8925 ; CHECK-NEXT: vmv1r.v v10, v8
8926 ; CHECK-NEXT: vmv1r.v v11, v8
8927 ; CHECK-NEXT: vmv1r.v v12, v8
8928 ; CHECK-NEXT: vmv1r.v v13, v8
8929 ; CHECK-NEXT: vmv1r.v v14, v8
8930 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8931 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
8934 tail call void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
8938 define void @test_vsuxseg5_mask_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8939 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f32_nxv2i16:
8940 ; CHECK: # %bb.0: # %entry
8941 ; CHECK-NEXT: vmv1r.v v10, v8
8942 ; CHECK-NEXT: vmv1r.v v11, v8
8943 ; CHECK-NEXT: vmv1r.v v12, v8
8944 ; CHECK-NEXT: vmv1r.v v13, v8
8945 ; CHECK-NEXT: vmv1r.v v14, v8
8946 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8947 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
8950 tail call void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
; vsuxseg6: 6-field indexed-unordered segment stores of nxv2f32, for
; i32/i8/i16 index types, unmasked and masked.  %val is duplicated into the
; v10-v15 value group; index remains in v9.  Assertions autogenerated by
; update_llc_test_checks.py.
8954 declare void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, i32)
8955 declare void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
8957 define void @test_vsuxseg6_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
8958 ; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i32:
8959 ; CHECK: # %bb.0: # %entry
8960 ; CHECK-NEXT: vmv1r.v v10, v8
8961 ; CHECK-NEXT: vmv1r.v v11, v8
8962 ; CHECK-NEXT: vmv1r.v v12, v8
8963 ; CHECK-NEXT: vmv1r.v v13, v8
8964 ; CHECK-NEXT: vmv1r.v v14, v8
8965 ; CHECK-NEXT: vmv1r.v v15, v8
8966 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8967 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
8970 tail call void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
8974 define void @test_vsuxseg6_mask_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
8975 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i32:
8976 ; CHECK: # %bb.0: # %entry
8977 ; CHECK-NEXT: vmv1r.v v10, v8
8978 ; CHECK-NEXT: vmv1r.v v11, v8
8979 ; CHECK-NEXT: vmv1r.v v12, v8
8980 ; CHECK-NEXT: vmv1r.v v13, v8
8981 ; CHECK-NEXT: vmv1r.v v14, v8
8982 ; CHECK-NEXT: vmv1r.v v15, v8
8983 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
8984 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
8987 tail call void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i8-indexed flavor.
8991 declare void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, i32)
8992 declare void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
8994 define void @test_vsuxseg6_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
8995 ; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i8:
8996 ; CHECK: # %bb.0: # %entry
8997 ; CHECK-NEXT: vmv1r.v v10, v8
8998 ; CHECK-NEXT: vmv1r.v v11, v8
8999 ; CHECK-NEXT: vmv1r.v v12, v8
9000 ; CHECK-NEXT: vmv1r.v v13, v8
9001 ; CHECK-NEXT: vmv1r.v v14, v8
9002 ; CHECK-NEXT: vmv1r.v v15, v8
9003 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9004 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
9007 tail call void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
9011 define void @test_vsuxseg6_mask_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9012 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i8:
9013 ; CHECK: # %bb.0: # %entry
9014 ; CHECK-NEXT: vmv1r.v v10, v8
9015 ; CHECK-NEXT: vmv1r.v v11, v8
9016 ; CHECK-NEXT: vmv1r.v v12, v8
9017 ; CHECK-NEXT: vmv1r.v v13, v8
9018 ; CHECK-NEXT: vmv1r.v v14, v8
9019 ; CHECK-NEXT: vmv1r.v v15, v8
9020 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9021 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
9024 tail call void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i16-indexed flavor.
9028 declare void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, i32)
9029 declare void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
9031 define void @test_vsuxseg6_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
9032 ; CHECK-LABEL: test_vsuxseg6_nxv2f32_nxv2i16:
9033 ; CHECK: # %bb.0: # %entry
9034 ; CHECK-NEXT: vmv1r.v v10, v8
9035 ; CHECK-NEXT: vmv1r.v v11, v8
9036 ; CHECK-NEXT: vmv1r.v v12, v8
9037 ; CHECK-NEXT: vmv1r.v v13, v8
9038 ; CHECK-NEXT: vmv1r.v v14, v8
9039 ; CHECK-NEXT: vmv1r.v v15, v8
9040 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9041 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
9044 tail call void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
9048 define void @test_vsuxseg6_mask_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9049 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f32_nxv2i16:
9050 ; CHECK: # %bb.0: # %entry
9051 ; CHECK-NEXT: vmv1r.v v10, v8
9052 ; CHECK-NEXT: vmv1r.v v11, v8
9053 ; CHECK-NEXT: vmv1r.v v12, v8
9054 ; CHECK-NEXT: vmv1r.v v13, v8
9055 ; CHECK-NEXT: vmv1r.v v14, v8
9056 ; CHECK-NEXT: vmv1r.v v15, v8
9057 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9058 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
9061 tail call void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
; vsuxseg7: 7-field indexed-unordered segment stores of nxv2f32, for
; i32/i8/i16 index types.  %val is duplicated into the v10-v16 value group;
; index remains in v9.  Assertions autogenerated by
; update_llc_test_checks.py.
9065 declare void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, i32)
9066 declare void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
9068 define void @test_vsuxseg7_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
9069 ; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i32:
9070 ; CHECK: # %bb.0: # %entry
9071 ; CHECK-NEXT: vmv1r.v v10, v8
9072 ; CHECK-NEXT: vmv1r.v v11, v8
9073 ; CHECK-NEXT: vmv1r.v v12, v8
9074 ; CHECK-NEXT: vmv1r.v v13, v8
9075 ; CHECK-NEXT: vmv1r.v v14, v8
9076 ; CHECK-NEXT: vmv1r.v v15, v8
9077 ; CHECK-NEXT: vmv1r.v v16, v8
9078 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9079 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
9082 tail call void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
9086 define void @test_vsuxseg7_mask_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9087 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i32:
9088 ; CHECK: # %bb.0: # %entry
9089 ; CHECK-NEXT: vmv1r.v v10, v8
9090 ; CHECK-NEXT: vmv1r.v v11, v8
9091 ; CHECK-NEXT: vmv1r.v v12, v8
9092 ; CHECK-NEXT: vmv1r.v v13, v8
9093 ; CHECK-NEXT: vmv1r.v v14, v8
9094 ; CHECK-NEXT: vmv1r.v v15, v8
9095 ; CHECK-NEXT: vmv1r.v v16, v8
9096 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9097 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
9100 tail call void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i8-indexed flavor.
9104 declare void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, i32)
9105 declare void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
9107 define void @test_vsuxseg7_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
9108 ; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i8:
9109 ; CHECK: # %bb.0: # %entry
9110 ; CHECK-NEXT: vmv1r.v v10, v8
9111 ; CHECK-NEXT: vmv1r.v v11, v8
9112 ; CHECK-NEXT: vmv1r.v v12, v8
9113 ; CHECK-NEXT: vmv1r.v v13, v8
9114 ; CHECK-NEXT: vmv1r.v v14, v8
9115 ; CHECK-NEXT: vmv1r.v v15, v8
9116 ; CHECK-NEXT: vmv1r.v v16, v8
9117 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9118 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
9121 tail call void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
9125 define void @test_vsuxseg7_mask_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9126 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i8:
9127 ; CHECK: # %bb.0: # %entry
9128 ; CHECK-NEXT: vmv1r.v v10, v8
9129 ; CHECK-NEXT: vmv1r.v v11, v8
9130 ; CHECK-NEXT: vmv1r.v v12, v8
9131 ; CHECK-NEXT: vmv1r.v v13, v8
9132 ; CHECK-NEXT: vmv1r.v v14, v8
9133 ; CHECK-NEXT: vmv1r.v v15, v8
9134 ; CHECK-NEXT: vmv1r.v v16, v8
9135 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9136 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
9139 tail call void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
; i16-indexed flavor (masked variant continues past this section).
9143 declare void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, i32)
9144 declare void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
9146 define void @test_vsuxseg7_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
9147 ; CHECK-LABEL: test_vsuxseg7_nxv2f32_nxv2i16:
9148 ; CHECK: # %bb.0: # %entry
9149 ; CHECK-NEXT: vmv1r.v v10, v8
9150 ; CHECK-NEXT: vmv1r.v v11, v8
9151 ; CHECK-NEXT: vmv1r.v v12, v8
9152 ; CHECK-NEXT: vmv1r.v v13, v8
9153 ; CHECK-NEXT: vmv1r.v v14, v8
9154 ; CHECK-NEXT: vmv1r.v v15, v8
9155 ; CHECK-NEXT: vmv1r.v v16, v8
9156 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9157 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
9160 tail call void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
9164 define void @test_vsuxseg7_mask_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9165 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f32_nxv2i16:
9166 ; CHECK: # %bb.0: # %entry
9167 ; CHECK-NEXT: vmv1r.v v10, v8
9168 ; CHECK-NEXT: vmv1r.v v11, v8
9169 ; CHECK-NEXT: vmv1r.v v12, v8
9170 ; CHECK-NEXT: vmv1r.v v13, v8
9171 ; CHECK-NEXT: vmv1r.v v14, v8
9172 ; CHECK-NEXT: vmv1r.v v15, v8
9173 ; CHECK-NEXT: vmv1r.v v16, v8
9174 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9175 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
9178 tail call void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
9182 declare void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, i32)
9183 declare void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i32(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
9185 define void @test_vsuxseg8_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
9186 ; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i32:
9187 ; CHECK: # %bb.0: # %entry
9188 ; CHECK-NEXT: vmv1r.v v10, v8
9189 ; CHECK-NEXT: vmv1r.v v11, v8
9190 ; CHECK-NEXT: vmv1r.v v12, v8
9191 ; CHECK-NEXT: vmv1r.v v13, v8
9192 ; CHECK-NEXT: vmv1r.v v14, v8
9193 ; CHECK-NEXT: vmv1r.v v15, v8
9194 ; CHECK-NEXT: vmv1r.v v16, v8
9195 ; CHECK-NEXT: vmv1r.v v17, v8
9196 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9197 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
9200 tail call void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
9204 define void @test_vsuxseg8_mask_nxv2f32_nxv2i32(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9205 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i32:
9206 ; CHECK: # %bb.0: # %entry
9207 ; CHECK-NEXT: vmv1r.v v10, v8
9208 ; CHECK-NEXT: vmv1r.v v11, v8
9209 ; CHECK-NEXT: vmv1r.v v12, v8
9210 ; CHECK-NEXT: vmv1r.v v13, v8
9211 ; CHECK-NEXT: vmv1r.v v14, v8
9212 ; CHECK-NEXT: vmv1r.v v15, v8
9213 ; CHECK-NEXT: vmv1r.v v16, v8
9214 ; CHECK-NEXT: vmv1r.v v17, v8
9215 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9216 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
9219 tail call void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i32(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
9223 declare void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, i32)
9224 declare void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i8(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
9226 define void @test_vsuxseg8_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
9227 ; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i8:
9228 ; CHECK: # %bb.0: # %entry
9229 ; CHECK-NEXT: vmv1r.v v10, v8
9230 ; CHECK-NEXT: vmv1r.v v11, v8
9231 ; CHECK-NEXT: vmv1r.v v12, v8
9232 ; CHECK-NEXT: vmv1r.v v13, v8
9233 ; CHECK-NEXT: vmv1r.v v14, v8
9234 ; CHECK-NEXT: vmv1r.v v15, v8
9235 ; CHECK-NEXT: vmv1r.v v16, v8
9236 ; CHECK-NEXT: vmv1r.v v17, v8
9237 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9238 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
9241 tail call void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
9245 define void @test_vsuxseg8_mask_nxv2f32_nxv2i8(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9246 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i8:
9247 ; CHECK: # %bb.0: # %entry
9248 ; CHECK-NEXT: vmv1r.v v10, v8
9249 ; CHECK-NEXT: vmv1r.v v11, v8
9250 ; CHECK-NEXT: vmv1r.v v12, v8
9251 ; CHECK-NEXT: vmv1r.v v13, v8
9252 ; CHECK-NEXT: vmv1r.v v14, v8
9253 ; CHECK-NEXT: vmv1r.v v15, v8
9254 ; CHECK-NEXT: vmv1r.v v16, v8
9255 ; CHECK-NEXT: vmv1r.v v17, v8
9256 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9257 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
9260 tail call void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i8(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
9264 declare void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, i32)
9265 declare void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i16(<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>,<vscale x 2 x float>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
9267 define void @test_vsuxseg8_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
9268 ; CHECK-LABEL: test_vsuxseg8_nxv2f32_nxv2i16:
9269 ; CHECK: # %bb.0: # %entry
9270 ; CHECK-NEXT: vmv1r.v v10, v8
9271 ; CHECK-NEXT: vmv1r.v v11, v8
9272 ; CHECK-NEXT: vmv1r.v v12, v8
9273 ; CHECK-NEXT: vmv1r.v v13, v8
9274 ; CHECK-NEXT: vmv1r.v v14, v8
9275 ; CHECK-NEXT: vmv1r.v v15, v8
9276 ; CHECK-NEXT: vmv1r.v v16, v8
9277 ; CHECK-NEXT: vmv1r.v v17, v8
9278 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9279 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
9282 tail call void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
9286 define void @test_vsuxseg8_mask_nxv2f32_nxv2i16(<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
9287 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f32_nxv2i16:
9288 ; CHECK: # %bb.0: # %entry
9289 ; CHECK-NEXT: vmv1r.v v10, v8
9290 ; CHECK-NEXT: vmv1r.v v11, v8
9291 ; CHECK-NEXT: vmv1r.v v12, v8
9292 ; CHECK-NEXT: vmv1r.v v13, v8
9293 ; CHECK-NEXT: vmv1r.v v14, v8
9294 ; CHECK-NEXT: vmv1r.v v15, v8
9295 ; CHECK-NEXT: vmv1r.v v16, v8
9296 ; CHECK-NEXT: vmv1r.v v17, v8
9297 ; CHECK-NEXT: vsetvli zero, a1, e32, m1, ta, ma
9298 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
9301 tail call void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i16(<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val,<vscale x 2 x float> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
9305 declare void @llvm.riscv.vsuxseg2.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, i32)
9306 declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
9308 define void @test_vsuxseg2_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
9309 ; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i8:
9310 ; CHECK: # %bb.0: # %entry
9311 ; CHECK-NEXT: vmv1r.v v10, v9
9312 ; CHECK-NEXT: vmv1r.v v9, v8
9313 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9314 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
9317 tail call void @llvm.riscv.vsuxseg2.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
9321 define void @test_vsuxseg2_mask_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9322 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i8:
9323 ; CHECK: # %bb.0: # %entry
9324 ; CHECK-NEXT: vmv1r.v v10, v9
9325 ; CHECK-NEXT: vmv1r.v v9, v8
9326 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9327 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
9330 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
9334 declare void @llvm.riscv.vsuxseg2.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, i32)
9335 declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
9337 define void @test_vsuxseg2_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
9338 ; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i32:
9339 ; CHECK: # %bb.0: # %entry
9340 ; CHECK-NEXT: vmv1r.v v10, v9
9341 ; CHECK-NEXT: vmv1r.v v9, v8
9342 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9343 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
9346 tail call void @llvm.riscv.vsuxseg2.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
9350 define void @test_vsuxseg2_mask_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9351 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i32:
9352 ; CHECK: # %bb.0: # %entry
9353 ; CHECK-NEXT: vmv1r.v v10, v9
9354 ; CHECK-NEXT: vmv1r.v v9, v8
9355 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9356 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
9359 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
9363 declare void @llvm.riscv.vsuxseg2.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, i32)
9364 declare void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
9366 define void @test_vsuxseg2_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
9367 ; CHECK-LABEL: test_vsuxseg2_nxv1f16_nxv1i16:
9368 ; CHECK: # %bb.0: # %entry
9369 ; CHECK-NEXT: vmv1r.v v10, v9
9370 ; CHECK-NEXT: vmv1r.v v9, v8
9371 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9372 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
9375 tail call void @llvm.riscv.vsuxseg2.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
9379 define void @test_vsuxseg2_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9380 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f16_nxv1i16:
9381 ; CHECK: # %bb.0: # %entry
9382 ; CHECK-NEXT: vmv1r.v v10, v9
9383 ; CHECK-NEXT: vmv1r.v v9, v8
9384 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9385 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
9388 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
9392 declare void @llvm.riscv.vsuxseg3.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, i32)
9393 declare void @llvm.riscv.vsuxseg3.mask.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
9395 define void @test_vsuxseg3_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
9396 ; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i8:
9397 ; CHECK: # %bb.0: # %entry
9398 ; CHECK-NEXT: vmv1r.v v10, v8
9399 ; CHECK-NEXT: vmv1r.v v11, v8
9400 ; CHECK-NEXT: vmv1r.v v12, v8
9401 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9402 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
9405 tail call void @llvm.riscv.vsuxseg3.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
9409 define void @test_vsuxseg3_mask_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9410 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i8:
9411 ; CHECK: # %bb.0: # %entry
9412 ; CHECK-NEXT: vmv1r.v v10, v8
9413 ; CHECK-NEXT: vmv1r.v v11, v8
9414 ; CHECK-NEXT: vmv1r.v v12, v8
9415 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9416 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
9419 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
9423 declare void @llvm.riscv.vsuxseg3.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, i32)
9424 declare void @llvm.riscv.vsuxseg3.mask.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
9426 define void @test_vsuxseg3_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
9427 ; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i32:
9428 ; CHECK: # %bb.0: # %entry
9429 ; CHECK-NEXT: vmv1r.v v10, v8
9430 ; CHECK-NEXT: vmv1r.v v11, v8
9431 ; CHECK-NEXT: vmv1r.v v12, v8
9432 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9433 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
9436 tail call void @llvm.riscv.vsuxseg3.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
9440 define void @test_vsuxseg3_mask_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9441 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i32:
9442 ; CHECK: # %bb.0: # %entry
9443 ; CHECK-NEXT: vmv1r.v v10, v8
9444 ; CHECK-NEXT: vmv1r.v v11, v8
9445 ; CHECK-NEXT: vmv1r.v v12, v8
9446 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9447 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
9450 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
9454 declare void @llvm.riscv.vsuxseg3.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, i32)
9455 declare void @llvm.riscv.vsuxseg3.mask.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
9457 define void @test_vsuxseg3_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
9458 ; CHECK-LABEL: test_vsuxseg3_nxv1f16_nxv1i16:
9459 ; CHECK: # %bb.0: # %entry
9460 ; CHECK-NEXT: vmv1r.v v10, v8
9461 ; CHECK-NEXT: vmv1r.v v11, v8
9462 ; CHECK-NEXT: vmv1r.v v12, v8
9463 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9464 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
9467 tail call void @llvm.riscv.vsuxseg3.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
9471 define void @test_vsuxseg3_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9472 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f16_nxv1i16:
9473 ; CHECK: # %bb.0: # %entry
9474 ; CHECK-NEXT: vmv1r.v v10, v8
9475 ; CHECK-NEXT: vmv1r.v v11, v8
9476 ; CHECK-NEXT: vmv1r.v v12, v8
9477 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9478 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
9481 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
9485 declare void @llvm.riscv.vsuxseg4.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, i32)
9486 declare void @llvm.riscv.vsuxseg4.mask.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
9488 define void @test_vsuxseg4_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
9489 ; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i8:
9490 ; CHECK: # %bb.0: # %entry
9491 ; CHECK-NEXT: vmv1r.v v10, v8
9492 ; CHECK-NEXT: vmv1r.v v11, v8
9493 ; CHECK-NEXT: vmv1r.v v12, v8
9494 ; CHECK-NEXT: vmv1r.v v13, v8
9495 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9496 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
9499 tail call void @llvm.riscv.vsuxseg4.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
9503 define void @test_vsuxseg4_mask_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9504 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i8:
9505 ; CHECK: # %bb.0: # %entry
9506 ; CHECK-NEXT: vmv1r.v v10, v8
9507 ; CHECK-NEXT: vmv1r.v v11, v8
9508 ; CHECK-NEXT: vmv1r.v v12, v8
9509 ; CHECK-NEXT: vmv1r.v v13, v8
9510 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9511 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
9514 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
9518 declare void @llvm.riscv.vsuxseg4.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, i32)
9519 declare void @llvm.riscv.vsuxseg4.mask.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
9521 define void @test_vsuxseg4_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
9522 ; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i32:
9523 ; CHECK: # %bb.0: # %entry
9524 ; CHECK-NEXT: vmv1r.v v10, v8
9525 ; CHECK-NEXT: vmv1r.v v11, v8
9526 ; CHECK-NEXT: vmv1r.v v12, v8
9527 ; CHECK-NEXT: vmv1r.v v13, v8
9528 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9529 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
9532 tail call void @llvm.riscv.vsuxseg4.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
9536 define void @test_vsuxseg4_mask_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9537 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i32:
9538 ; CHECK: # %bb.0: # %entry
9539 ; CHECK-NEXT: vmv1r.v v10, v8
9540 ; CHECK-NEXT: vmv1r.v v11, v8
9541 ; CHECK-NEXT: vmv1r.v v12, v8
9542 ; CHECK-NEXT: vmv1r.v v13, v8
9543 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9544 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
9547 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
9551 declare void @llvm.riscv.vsuxseg4.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, i32)
9552 declare void @llvm.riscv.vsuxseg4.mask.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
9554 define void @test_vsuxseg4_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
9555 ; CHECK-LABEL: test_vsuxseg4_nxv1f16_nxv1i16:
9556 ; CHECK: # %bb.0: # %entry
9557 ; CHECK-NEXT: vmv1r.v v10, v8
9558 ; CHECK-NEXT: vmv1r.v v11, v8
9559 ; CHECK-NEXT: vmv1r.v v12, v8
9560 ; CHECK-NEXT: vmv1r.v v13, v8
9561 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9562 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
9565 tail call void @llvm.riscv.vsuxseg4.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
9569 define void @test_vsuxseg4_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9570 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f16_nxv1i16:
9571 ; CHECK: # %bb.0: # %entry
9572 ; CHECK-NEXT: vmv1r.v v10, v8
9573 ; CHECK-NEXT: vmv1r.v v11, v8
9574 ; CHECK-NEXT: vmv1r.v v12, v8
9575 ; CHECK-NEXT: vmv1r.v v13, v8
9576 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9577 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
9580 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
9584 declare void @llvm.riscv.vsuxseg5.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, i32)
9585 declare void @llvm.riscv.vsuxseg5.mask.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
9587 define void @test_vsuxseg5_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
9588 ; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i8:
9589 ; CHECK: # %bb.0: # %entry
9590 ; CHECK-NEXT: vmv1r.v v10, v8
9591 ; CHECK-NEXT: vmv1r.v v11, v8
9592 ; CHECK-NEXT: vmv1r.v v12, v8
9593 ; CHECK-NEXT: vmv1r.v v13, v8
9594 ; CHECK-NEXT: vmv1r.v v14, v8
9595 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9596 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
9599 tail call void @llvm.riscv.vsuxseg5.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
9603 define void @test_vsuxseg5_mask_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9604 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i8:
9605 ; CHECK: # %bb.0: # %entry
9606 ; CHECK-NEXT: vmv1r.v v10, v8
9607 ; CHECK-NEXT: vmv1r.v v11, v8
9608 ; CHECK-NEXT: vmv1r.v v12, v8
9609 ; CHECK-NEXT: vmv1r.v v13, v8
9610 ; CHECK-NEXT: vmv1r.v v14, v8
9611 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9612 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
9615 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
9619 declare void @llvm.riscv.vsuxseg5.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, i32)
9620 declare void @llvm.riscv.vsuxseg5.mask.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
9622 define void @test_vsuxseg5_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
9623 ; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i32:
9624 ; CHECK: # %bb.0: # %entry
9625 ; CHECK-NEXT: vmv1r.v v10, v8
9626 ; CHECK-NEXT: vmv1r.v v11, v8
9627 ; CHECK-NEXT: vmv1r.v v12, v8
9628 ; CHECK-NEXT: vmv1r.v v13, v8
9629 ; CHECK-NEXT: vmv1r.v v14, v8
9630 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9631 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
9634 tail call void @llvm.riscv.vsuxseg5.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
9638 define void @test_vsuxseg5_mask_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9639 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i32:
9640 ; CHECK: # %bb.0: # %entry
9641 ; CHECK-NEXT: vmv1r.v v10, v8
9642 ; CHECK-NEXT: vmv1r.v v11, v8
9643 ; CHECK-NEXT: vmv1r.v v12, v8
9644 ; CHECK-NEXT: vmv1r.v v13, v8
9645 ; CHECK-NEXT: vmv1r.v v14, v8
9646 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9647 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
9650 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
9654 declare void @llvm.riscv.vsuxseg5.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, i32)
9655 declare void @llvm.riscv.vsuxseg5.mask.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
9657 define void @test_vsuxseg5_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
9658 ; CHECK-LABEL: test_vsuxseg5_nxv1f16_nxv1i16:
9659 ; CHECK: # %bb.0: # %entry
9660 ; CHECK-NEXT: vmv1r.v v10, v8
9661 ; CHECK-NEXT: vmv1r.v v11, v8
9662 ; CHECK-NEXT: vmv1r.v v12, v8
9663 ; CHECK-NEXT: vmv1r.v v13, v8
9664 ; CHECK-NEXT: vmv1r.v v14, v8
9665 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9666 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
9669 tail call void @llvm.riscv.vsuxseg5.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
9673 define void @test_vsuxseg5_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9674 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f16_nxv1i16:
9675 ; CHECK: # %bb.0: # %entry
9676 ; CHECK-NEXT: vmv1r.v v10, v8
9677 ; CHECK-NEXT: vmv1r.v v11, v8
9678 ; CHECK-NEXT: vmv1r.v v12, v8
9679 ; CHECK-NEXT: vmv1r.v v13, v8
9680 ; CHECK-NEXT: vmv1r.v v14, v8
9681 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9682 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
9685 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
9689 declare void @llvm.riscv.vsuxseg6.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, i32)
9690 declare void @llvm.riscv.vsuxseg6.mask.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
9692 define void @test_vsuxseg6_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
9693 ; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i8:
9694 ; CHECK: # %bb.0: # %entry
9695 ; CHECK-NEXT: vmv1r.v v10, v8
9696 ; CHECK-NEXT: vmv1r.v v11, v8
9697 ; CHECK-NEXT: vmv1r.v v12, v8
9698 ; CHECK-NEXT: vmv1r.v v13, v8
9699 ; CHECK-NEXT: vmv1r.v v14, v8
9700 ; CHECK-NEXT: vmv1r.v v15, v8
9701 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9702 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
9705 tail call void @llvm.riscv.vsuxseg6.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
9709 define void @test_vsuxseg6_mask_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9710 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i8:
9711 ; CHECK: # %bb.0: # %entry
9712 ; CHECK-NEXT: vmv1r.v v10, v8
9713 ; CHECK-NEXT: vmv1r.v v11, v8
9714 ; CHECK-NEXT: vmv1r.v v12, v8
9715 ; CHECK-NEXT: vmv1r.v v13, v8
9716 ; CHECK-NEXT: vmv1r.v v14, v8
9717 ; CHECK-NEXT: vmv1r.v v15, v8
9718 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9719 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
9722 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
9726 declare void @llvm.riscv.vsuxseg6.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, i32)
9727 declare void @llvm.riscv.vsuxseg6.mask.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
9729 define void @test_vsuxseg6_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
9730 ; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i32:
9731 ; CHECK: # %bb.0: # %entry
9732 ; CHECK-NEXT: vmv1r.v v10, v8
9733 ; CHECK-NEXT: vmv1r.v v11, v8
9734 ; CHECK-NEXT: vmv1r.v v12, v8
9735 ; CHECK-NEXT: vmv1r.v v13, v8
9736 ; CHECK-NEXT: vmv1r.v v14, v8
9737 ; CHECK-NEXT: vmv1r.v v15, v8
9738 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9739 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
9742 tail call void @llvm.riscv.vsuxseg6.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
9746 define void @test_vsuxseg6_mask_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9747 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i32:
9748 ; CHECK: # %bb.0: # %entry
9749 ; CHECK-NEXT: vmv1r.v v10, v8
9750 ; CHECK-NEXT: vmv1r.v v11, v8
9751 ; CHECK-NEXT: vmv1r.v v12, v8
9752 ; CHECK-NEXT: vmv1r.v v13, v8
9753 ; CHECK-NEXT: vmv1r.v v14, v8
9754 ; CHECK-NEXT: vmv1r.v v15, v8
9755 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9756 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
9759 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
9763 declare void @llvm.riscv.vsuxseg6.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, i32)
9764 declare void @llvm.riscv.vsuxseg6.mask.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
9766 define void @test_vsuxseg6_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
9767 ; CHECK-LABEL: test_vsuxseg6_nxv1f16_nxv1i16:
9768 ; CHECK: # %bb.0: # %entry
9769 ; CHECK-NEXT: vmv1r.v v10, v8
9770 ; CHECK-NEXT: vmv1r.v v11, v8
9771 ; CHECK-NEXT: vmv1r.v v12, v8
9772 ; CHECK-NEXT: vmv1r.v v13, v8
9773 ; CHECK-NEXT: vmv1r.v v14, v8
9774 ; CHECK-NEXT: vmv1r.v v15, v8
9775 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9776 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
9779 tail call void @llvm.riscv.vsuxseg6.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
9783 define void @test_vsuxseg6_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9784 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f16_nxv1i16:
9785 ; CHECK: # %bb.0: # %entry
9786 ; CHECK-NEXT: vmv1r.v v10, v8
9787 ; CHECK-NEXT: vmv1r.v v11, v8
9788 ; CHECK-NEXT: vmv1r.v v12, v8
9789 ; CHECK-NEXT: vmv1r.v v13, v8
9790 ; CHECK-NEXT: vmv1r.v v14, v8
9791 ; CHECK-NEXT: vmv1r.v v15, v8
9792 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9793 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
9796 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
9800 declare void @llvm.riscv.vsuxseg7.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, i32)
9801 declare void @llvm.riscv.vsuxseg7.mask.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
9803 define void @test_vsuxseg7_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
9804 ; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i8:
9805 ; CHECK: # %bb.0: # %entry
9806 ; CHECK-NEXT: vmv1r.v v10, v8
9807 ; CHECK-NEXT: vmv1r.v v11, v8
9808 ; CHECK-NEXT: vmv1r.v v12, v8
9809 ; CHECK-NEXT: vmv1r.v v13, v8
9810 ; CHECK-NEXT: vmv1r.v v14, v8
9811 ; CHECK-NEXT: vmv1r.v v15, v8
9812 ; CHECK-NEXT: vmv1r.v v16, v8
9813 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9814 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
9817 tail call void @llvm.riscv.vsuxseg7.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
9821 define void @test_vsuxseg7_mask_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9822 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i8:
9823 ; CHECK: # %bb.0: # %entry
9824 ; CHECK-NEXT: vmv1r.v v10, v8
9825 ; CHECK-NEXT: vmv1r.v v11, v8
9826 ; CHECK-NEXT: vmv1r.v v12, v8
9827 ; CHECK-NEXT: vmv1r.v v13, v8
9828 ; CHECK-NEXT: vmv1r.v v14, v8
9829 ; CHECK-NEXT: vmv1r.v v15, v8
9830 ; CHECK-NEXT: vmv1r.v v16, v8
9831 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9832 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
9835 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
9839 declare void @llvm.riscv.vsuxseg7.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, i32)
9840 declare void @llvm.riscv.vsuxseg7.mask.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
9842 define void @test_vsuxseg7_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
9843 ; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i32:
9844 ; CHECK: # %bb.0: # %entry
9845 ; CHECK-NEXT: vmv1r.v v10, v8
9846 ; CHECK-NEXT: vmv1r.v v11, v8
9847 ; CHECK-NEXT: vmv1r.v v12, v8
9848 ; CHECK-NEXT: vmv1r.v v13, v8
9849 ; CHECK-NEXT: vmv1r.v v14, v8
9850 ; CHECK-NEXT: vmv1r.v v15, v8
9851 ; CHECK-NEXT: vmv1r.v v16, v8
9852 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9853 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
9856 tail call void @llvm.riscv.vsuxseg7.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
9860 define void @test_vsuxseg7_mask_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9861 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i32:
9862 ; CHECK: # %bb.0: # %entry
9863 ; CHECK-NEXT: vmv1r.v v10, v8
9864 ; CHECK-NEXT: vmv1r.v v11, v8
9865 ; CHECK-NEXT: vmv1r.v v12, v8
9866 ; CHECK-NEXT: vmv1r.v v13, v8
9867 ; CHECK-NEXT: vmv1r.v v14, v8
9868 ; CHECK-NEXT: vmv1r.v v15, v8
9869 ; CHECK-NEXT: vmv1r.v v16, v8
9870 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9871 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
9874 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
9878 declare void @llvm.riscv.vsuxseg7.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, i32)
9879 declare void @llvm.riscv.vsuxseg7.mask.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
9881 define void @test_vsuxseg7_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
9882 ; CHECK-LABEL: test_vsuxseg7_nxv1f16_nxv1i16:
9883 ; CHECK: # %bb.0: # %entry
9884 ; CHECK-NEXT: vmv1r.v v10, v8
9885 ; CHECK-NEXT: vmv1r.v v11, v8
9886 ; CHECK-NEXT: vmv1r.v v12, v8
9887 ; CHECK-NEXT: vmv1r.v v13, v8
9888 ; CHECK-NEXT: vmv1r.v v14, v8
9889 ; CHECK-NEXT: vmv1r.v v15, v8
9890 ; CHECK-NEXT: vmv1r.v v16, v8
9891 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9892 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
9895 tail call void @llvm.riscv.vsuxseg7.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
9899 define void @test_vsuxseg7_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9900 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f16_nxv1i16:
9901 ; CHECK: # %bb.0: # %entry
9902 ; CHECK-NEXT: vmv1r.v v10, v8
9903 ; CHECK-NEXT: vmv1r.v v11, v8
9904 ; CHECK-NEXT: vmv1r.v v12, v8
9905 ; CHECK-NEXT: vmv1r.v v13, v8
9906 ; CHECK-NEXT: vmv1r.v v14, v8
9907 ; CHECK-NEXT: vmv1r.v v15, v8
9908 ; CHECK-NEXT: vmv1r.v v16, v8
9909 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9910 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
9913 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
9917 declare void @llvm.riscv.vsuxseg8.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, i32)
9918 declare void @llvm.riscv.vsuxseg8.mask.nxv1f16.nxv1i8(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
9920 define void @test_vsuxseg8_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
9921 ; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i8:
9922 ; CHECK: # %bb.0: # %entry
9923 ; CHECK-NEXT: vmv1r.v v10, v8
9924 ; CHECK-NEXT: vmv1r.v v11, v8
9925 ; CHECK-NEXT: vmv1r.v v12, v8
9926 ; CHECK-NEXT: vmv1r.v v13, v8
9927 ; CHECK-NEXT: vmv1r.v v14, v8
9928 ; CHECK-NEXT: vmv1r.v v15, v8
9929 ; CHECK-NEXT: vmv1r.v v16, v8
9930 ; CHECK-NEXT: vmv1r.v v17, v8
9931 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9932 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
9935 tail call void @llvm.riscv.vsuxseg8.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
9939 define void @test_vsuxseg8_mask_nxv1f16_nxv1i8(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9940 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i8:
9941 ; CHECK: # %bb.0: # %entry
9942 ; CHECK-NEXT: vmv1r.v v10, v8
9943 ; CHECK-NEXT: vmv1r.v v11, v8
9944 ; CHECK-NEXT: vmv1r.v v12, v8
9945 ; CHECK-NEXT: vmv1r.v v13, v8
9946 ; CHECK-NEXT: vmv1r.v v14, v8
9947 ; CHECK-NEXT: vmv1r.v v15, v8
9948 ; CHECK-NEXT: vmv1r.v v16, v8
9949 ; CHECK-NEXT: vmv1r.v v17, v8
9950 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9951 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
9954 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f16.nxv1i8(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
9958 declare void @llvm.riscv.vsuxseg8.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, i32)
9959 declare void @llvm.riscv.vsuxseg8.mask.nxv1f16.nxv1i32(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
9961 define void @test_vsuxseg8_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
9962 ; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i32:
9963 ; CHECK: # %bb.0: # %entry
9964 ; CHECK-NEXT: vmv1r.v v10, v8
9965 ; CHECK-NEXT: vmv1r.v v11, v8
9966 ; CHECK-NEXT: vmv1r.v v12, v8
9967 ; CHECK-NEXT: vmv1r.v v13, v8
9968 ; CHECK-NEXT: vmv1r.v v14, v8
9969 ; CHECK-NEXT: vmv1r.v v15, v8
9970 ; CHECK-NEXT: vmv1r.v v16, v8
9971 ; CHECK-NEXT: vmv1r.v v17, v8
9972 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9973 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
9976 tail call void @llvm.riscv.vsuxseg8.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
9980 define void @test_vsuxseg8_mask_nxv1f16_nxv1i32(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
9981 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i32:
9982 ; CHECK: # %bb.0: # %entry
9983 ; CHECK-NEXT: vmv1r.v v10, v8
9984 ; CHECK-NEXT: vmv1r.v v11, v8
9985 ; CHECK-NEXT: vmv1r.v v12, v8
9986 ; CHECK-NEXT: vmv1r.v v13, v8
9987 ; CHECK-NEXT: vmv1r.v v14, v8
9988 ; CHECK-NEXT: vmv1r.v v15, v8
9989 ; CHECK-NEXT: vmv1r.v v16, v8
9990 ; CHECK-NEXT: vmv1r.v v17, v8
9991 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
9992 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
9995 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f16.nxv1i32(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
9999 declare void @llvm.riscv.vsuxseg8.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, i32)
10000 declare void @llvm.riscv.vsuxseg8.mask.nxv1f16.nxv1i16(<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>,<vscale x 1 x half>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10002 define void @test_vsuxseg8_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10003 ; CHECK-LABEL: test_vsuxseg8_nxv1f16_nxv1i16:
10004 ; CHECK: # %bb.0: # %entry
10005 ; CHECK-NEXT: vmv1r.v v10, v8
10006 ; CHECK-NEXT: vmv1r.v v11, v8
10007 ; CHECK-NEXT: vmv1r.v v12, v8
10008 ; CHECK-NEXT: vmv1r.v v13, v8
10009 ; CHECK-NEXT: vmv1r.v v14, v8
10010 ; CHECK-NEXT: vmv1r.v v15, v8
10011 ; CHECK-NEXT: vmv1r.v v16, v8
10012 ; CHECK-NEXT: vmv1r.v v17, v8
10013 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
10014 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
10017 tail call void @llvm.riscv.vsuxseg8.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10021 define void @test_vsuxseg8_mask_nxv1f16_nxv1i16(<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10022 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f16_nxv1i16:
10023 ; CHECK: # %bb.0: # %entry
10024 ; CHECK-NEXT: vmv1r.v v10, v8
10025 ; CHECK-NEXT: vmv1r.v v11, v8
10026 ; CHECK-NEXT: vmv1r.v v12, v8
10027 ; CHECK-NEXT: vmv1r.v v13, v8
10028 ; CHECK-NEXT: vmv1r.v v14, v8
10029 ; CHECK-NEXT: vmv1r.v v15, v8
10030 ; CHECK-NEXT: vmv1r.v v16, v8
10031 ; CHECK-NEXT: vmv1r.v v17, v8
10032 ; CHECK-NEXT: vsetvli zero, a1, e16, mf4, ta, ma
10033 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
10036 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f16.nxv1i16(<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val,<vscale x 1 x half> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10040 declare void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, i32)
10041 declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
10043 define void @test_vsuxseg2_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
10044 ; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i8:
10045 ; CHECK: # %bb.0: # %entry
10046 ; CHECK-NEXT: vmv1r.v v10, v9
10047 ; CHECK-NEXT: vmv1r.v v9, v8
10048 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10049 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
10052 tail call void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
10056 define void @test_vsuxseg2_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10057 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i8:
10058 ; CHECK: # %bb.0: # %entry
10059 ; CHECK-NEXT: vmv1r.v v10, v9
10060 ; CHECK-NEXT: vmv1r.v v9, v8
10061 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10062 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
10065 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
10069 declare void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, i32)
10070 declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
10072 define void @test_vsuxseg2_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
10073 ; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i32:
10074 ; CHECK: # %bb.0: # %entry
10075 ; CHECK-NEXT: vmv1r.v v10, v9
10076 ; CHECK-NEXT: vmv1r.v v9, v8
10077 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10078 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
10081 tail call void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
10085 define void @test_vsuxseg2_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10086 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i32:
10087 ; CHECK: # %bb.0: # %entry
10088 ; CHECK-NEXT: vmv1r.v v10, v9
10089 ; CHECK-NEXT: vmv1r.v v9, v8
10090 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10091 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
10094 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
10098 declare void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, i32)
10099 declare void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10101 define void @test_vsuxseg2_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10102 ; CHECK-LABEL: test_vsuxseg2_nxv1f32_nxv1i16:
10103 ; CHECK: # %bb.0: # %entry
10104 ; CHECK-NEXT: vmv1r.v v10, v9
10105 ; CHECK-NEXT: vmv1r.v v9, v8
10106 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10107 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
10110 tail call void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10114 define void @test_vsuxseg2_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10115 ; CHECK-LABEL: test_vsuxseg2_mask_nxv1f32_nxv1i16:
10116 ; CHECK: # %bb.0: # %entry
10117 ; CHECK-NEXT: vmv1r.v v10, v9
10118 ; CHECK-NEXT: vmv1r.v v9, v8
10119 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10120 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
10123 tail call void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10127 declare void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, i32)
10128 declare void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
10130 define void @test_vsuxseg3_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
10131 ; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i8:
10132 ; CHECK: # %bb.0: # %entry
10133 ; CHECK-NEXT: vmv1r.v v10, v8
10134 ; CHECK-NEXT: vmv1r.v v11, v8
10135 ; CHECK-NEXT: vmv1r.v v12, v8
10136 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10137 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
10140 tail call void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
10144 define void @test_vsuxseg3_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10145 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i8:
10146 ; CHECK: # %bb.0: # %entry
10147 ; CHECK-NEXT: vmv1r.v v10, v8
10148 ; CHECK-NEXT: vmv1r.v v11, v8
10149 ; CHECK-NEXT: vmv1r.v v12, v8
10150 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10151 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
10154 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
10158 declare void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, i32)
10159 declare void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
10161 define void @test_vsuxseg3_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
10162 ; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i32:
10163 ; CHECK: # %bb.0: # %entry
10164 ; CHECK-NEXT: vmv1r.v v10, v8
10165 ; CHECK-NEXT: vmv1r.v v11, v8
10166 ; CHECK-NEXT: vmv1r.v v12, v8
10167 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10168 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
10171 tail call void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
10175 define void @test_vsuxseg3_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10176 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i32:
10177 ; CHECK: # %bb.0: # %entry
10178 ; CHECK-NEXT: vmv1r.v v10, v8
10179 ; CHECK-NEXT: vmv1r.v v11, v8
10180 ; CHECK-NEXT: vmv1r.v v12, v8
10181 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10182 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
10185 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
10189 declare void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, i32)
10190 declare void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10192 define void @test_vsuxseg3_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10193 ; CHECK-LABEL: test_vsuxseg3_nxv1f32_nxv1i16:
10194 ; CHECK: # %bb.0: # %entry
10195 ; CHECK-NEXT: vmv1r.v v10, v8
10196 ; CHECK-NEXT: vmv1r.v v11, v8
10197 ; CHECK-NEXT: vmv1r.v v12, v8
10198 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10199 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
10202 tail call void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10206 define void @test_vsuxseg3_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10207 ; CHECK-LABEL: test_vsuxseg3_mask_nxv1f32_nxv1i16:
10208 ; CHECK: # %bb.0: # %entry
10209 ; CHECK-NEXT: vmv1r.v v10, v8
10210 ; CHECK-NEXT: vmv1r.v v11, v8
10211 ; CHECK-NEXT: vmv1r.v v12, v8
10212 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10213 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
10216 tail call void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10220 declare void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, i32)
10221 declare void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
10223 define void @test_vsuxseg4_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
10224 ; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i8:
10225 ; CHECK: # %bb.0: # %entry
10226 ; CHECK-NEXT: vmv1r.v v10, v8
10227 ; CHECK-NEXT: vmv1r.v v11, v8
10228 ; CHECK-NEXT: vmv1r.v v12, v8
10229 ; CHECK-NEXT: vmv1r.v v13, v8
10230 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10231 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
10234 tail call void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
10238 define void @test_vsuxseg4_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10239 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i8:
10240 ; CHECK: # %bb.0: # %entry
10241 ; CHECK-NEXT: vmv1r.v v10, v8
10242 ; CHECK-NEXT: vmv1r.v v11, v8
10243 ; CHECK-NEXT: vmv1r.v v12, v8
10244 ; CHECK-NEXT: vmv1r.v v13, v8
10245 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10246 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
10249 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
10253 declare void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, i32)
10254 declare void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
10256 define void @test_vsuxseg4_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
10257 ; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i32:
10258 ; CHECK: # %bb.0: # %entry
10259 ; CHECK-NEXT: vmv1r.v v10, v8
10260 ; CHECK-NEXT: vmv1r.v v11, v8
10261 ; CHECK-NEXT: vmv1r.v v12, v8
10262 ; CHECK-NEXT: vmv1r.v v13, v8
10263 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10264 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
10267 tail call void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
10271 define void @test_vsuxseg4_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10272 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i32:
10273 ; CHECK: # %bb.0: # %entry
10274 ; CHECK-NEXT: vmv1r.v v10, v8
10275 ; CHECK-NEXT: vmv1r.v v11, v8
10276 ; CHECK-NEXT: vmv1r.v v12, v8
10277 ; CHECK-NEXT: vmv1r.v v13, v8
10278 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10279 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
10282 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
10286 declare void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, i32)
10287 declare void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10289 define void @test_vsuxseg4_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10290 ; CHECK-LABEL: test_vsuxseg4_nxv1f32_nxv1i16:
10291 ; CHECK: # %bb.0: # %entry
10292 ; CHECK-NEXT: vmv1r.v v10, v8
10293 ; CHECK-NEXT: vmv1r.v v11, v8
10294 ; CHECK-NEXT: vmv1r.v v12, v8
10295 ; CHECK-NEXT: vmv1r.v v13, v8
10296 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10297 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
10300 tail call void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10304 define void @test_vsuxseg4_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10305 ; CHECK-LABEL: test_vsuxseg4_mask_nxv1f32_nxv1i16:
10306 ; CHECK: # %bb.0: # %entry
10307 ; CHECK-NEXT: vmv1r.v v10, v8
10308 ; CHECK-NEXT: vmv1r.v v11, v8
10309 ; CHECK-NEXT: vmv1r.v v12, v8
10310 ; CHECK-NEXT: vmv1r.v v13, v8
10311 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10312 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
10315 tail call void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10319 declare void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, i32)
10320 declare void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
10322 define void @test_vsuxseg5_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
10323 ; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i8:
10324 ; CHECK: # %bb.0: # %entry
10325 ; CHECK-NEXT: vmv1r.v v10, v8
10326 ; CHECK-NEXT: vmv1r.v v11, v8
10327 ; CHECK-NEXT: vmv1r.v v12, v8
10328 ; CHECK-NEXT: vmv1r.v v13, v8
10329 ; CHECK-NEXT: vmv1r.v v14, v8
10330 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10331 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
10334 tail call void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
10338 define void @test_vsuxseg5_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10339 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i8:
10340 ; CHECK: # %bb.0: # %entry
10341 ; CHECK-NEXT: vmv1r.v v10, v8
10342 ; CHECK-NEXT: vmv1r.v v11, v8
10343 ; CHECK-NEXT: vmv1r.v v12, v8
10344 ; CHECK-NEXT: vmv1r.v v13, v8
10345 ; CHECK-NEXT: vmv1r.v v14, v8
10346 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10347 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
10350 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
10354 declare void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, i32)
10355 declare void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
10357 define void @test_vsuxseg5_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
10358 ; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i32:
10359 ; CHECK: # %bb.0: # %entry
10360 ; CHECK-NEXT: vmv1r.v v10, v8
10361 ; CHECK-NEXT: vmv1r.v v11, v8
10362 ; CHECK-NEXT: vmv1r.v v12, v8
10363 ; CHECK-NEXT: vmv1r.v v13, v8
10364 ; CHECK-NEXT: vmv1r.v v14, v8
10365 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10366 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
10369 tail call void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
10373 define void @test_vsuxseg5_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10374 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i32:
10375 ; CHECK: # %bb.0: # %entry
10376 ; CHECK-NEXT: vmv1r.v v10, v8
10377 ; CHECK-NEXT: vmv1r.v v11, v8
10378 ; CHECK-NEXT: vmv1r.v v12, v8
10379 ; CHECK-NEXT: vmv1r.v v13, v8
10380 ; CHECK-NEXT: vmv1r.v v14, v8
10381 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10382 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
10385 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
10389 declare void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, i32)
10390 declare void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10392 define void @test_vsuxseg5_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10393 ; CHECK-LABEL: test_vsuxseg5_nxv1f32_nxv1i16:
10394 ; CHECK: # %bb.0: # %entry
10395 ; CHECK-NEXT: vmv1r.v v10, v8
10396 ; CHECK-NEXT: vmv1r.v v11, v8
10397 ; CHECK-NEXT: vmv1r.v v12, v8
10398 ; CHECK-NEXT: vmv1r.v v13, v8
10399 ; CHECK-NEXT: vmv1r.v v14, v8
10400 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10401 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
10404 tail call void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10408 define void @test_vsuxseg5_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10409 ; CHECK-LABEL: test_vsuxseg5_mask_nxv1f32_nxv1i16:
10410 ; CHECK: # %bb.0: # %entry
10411 ; CHECK-NEXT: vmv1r.v v10, v8
10412 ; CHECK-NEXT: vmv1r.v v11, v8
10413 ; CHECK-NEXT: vmv1r.v v12, v8
10414 ; CHECK-NEXT: vmv1r.v v13, v8
10415 ; CHECK-NEXT: vmv1r.v v14, v8
10416 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10417 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
10420 tail call void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10424 declare void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, i32)
10425 declare void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
10427 define void @test_vsuxseg6_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
10428 ; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i8:
10429 ; CHECK: # %bb.0: # %entry
10430 ; CHECK-NEXT: vmv1r.v v10, v8
10431 ; CHECK-NEXT: vmv1r.v v11, v8
10432 ; CHECK-NEXT: vmv1r.v v12, v8
10433 ; CHECK-NEXT: vmv1r.v v13, v8
10434 ; CHECK-NEXT: vmv1r.v v14, v8
10435 ; CHECK-NEXT: vmv1r.v v15, v8
10436 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10437 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
10440 tail call void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
10444 define void @test_vsuxseg6_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10445 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i8:
10446 ; CHECK: # %bb.0: # %entry
10447 ; CHECK-NEXT: vmv1r.v v10, v8
10448 ; CHECK-NEXT: vmv1r.v v11, v8
10449 ; CHECK-NEXT: vmv1r.v v12, v8
10450 ; CHECK-NEXT: vmv1r.v v13, v8
10451 ; CHECK-NEXT: vmv1r.v v14, v8
10452 ; CHECK-NEXT: vmv1r.v v15, v8
10453 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10454 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
10457 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
10461 declare void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, i32)
10462 declare void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
10464 define void @test_vsuxseg6_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
10465 ; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i32:
10466 ; CHECK: # %bb.0: # %entry
10467 ; CHECK-NEXT: vmv1r.v v10, v8
10468 ; CHECK-NEXT: vmv1r.v v11, v8
10469 ; CHECK-NEXT: vmv1r.v v12, v8
10470 ; CHECK-NEXT: vmv1r.v v13, v8
10471 ; CHECK-NEXT: vmv1r.v v14, v8
10472 ; CHECK-NEXT: vmv1r.v v15, v8
10473 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10474 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
10477 tail call void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
10481 define void @test_vsuxseg6_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10482 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i32:
10483 ; CHECK: # %bb.0: # %entry
10484 ; CHECK-NEXT: vmv1r.v v10, v8
10485 ; CHECK-NEXT: vmv1r.v v11, v8
10486 ; CHECK-NEXT: vmv1r.v v12, v8
10487 ; CHECK-NEXT: vmv1r.v v13, v8
10488 ; CHECK-NEXT: vmv1r.v v14, v8
10489 ; CHECK-NEXT: vmv1r.v v15, v8
10490 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10491 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
10494 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
10498 declare void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, i32)
10499 declare void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10501 define void @test_vsuxseg6_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10502 ; CHECK-LABEL: test_vsuxseg6_nxv1f32_nxv1i16:
10503 ; CHECK: # %bb.0: # %entry
10504 ; CHECK-NEXT: vmv1r.v v10, v8
10505 ; CHECK-NEXT: vmv1r.v v11, v8
10506 ; CHECK-NEXT: vmv1r.v v12, v8
10507 ; CHECK-NEXT: vmv1r.v v13, v8
10508 ; CHECK-NEXT: vmv1r.v v14, v8
10509 ; CHECK-NEXT: vmv1r.v v15, v8
10510 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10511 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
10514 tail call void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10518 define void @test_vsuxseg6_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10519 ; CHECK-LABEL: test_vsuxseg6_mask_nxv1f32_nxv1i16:
10520 ; CHECK: # %bb.0: # %entry
10521 ; CHECK-NEXT: vmv1r.v v10, v8
10522 ; CHECK-NEXT: vmv1r.v v11, v8
10523 ; CHECK-NEXT: vmv1r.v v12, v8
10524 ; CHECK-NEXT: vmv1r.v v13, v8
10525 ; CHECK-NEXT: vmv1r.v v14, v8
10526 ; CHECK-NEXT: vmv1r.v v15, v8
10527 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10528 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
10531 tail call void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10535 declare void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, i32)
10536 declare void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
10538 define void @test_vsuxseg7_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
10539 ; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i8:
10540 ; CHECK: # %bb.0: # %entry
10541 ; CHECK-NEXT: vmv1r.v v10, v8
10542 ; CHECK-NEXT: vmv1r.v v11, v8
10543 ; CHECK-NEXT: vmv1r.v v12, v8
10544 ; CHECK-NEXT: vmv1r.v v13, v8
10545 ; CHECK-NEXT: vmv1r.v v14, v8
10546 ; CHECK-NEXT: vmv1r.v v15, v8
10547 ; CHECK-NEXT: vmv1r.v v16, v8
10548 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10549 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
10552 tail call void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
10556 define void @test_vsuxseg7_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10557 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i8:
10558 ; CHECK: # %bb.0: # %entry
10559 ; CHECK-NEXT: vmv1r.v v10, v8
10560 ; CHECK-NEXT: vmv1r.v v11, v8
10561 ; CHECK-NEXT: vmv1r.v v12, v8
10562 ; CHECK-NEXT: vmv1r.v v13, v8
10563 ; CHECK-NEXT: vmv1r.v v14, v8
10564 ; CHECK-NEXT: vmv1r.v v15, v8
10565 ; CHECK-NEXT: vmv1r.v v16, v8
10566 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10567 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
10570 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
10574 declare void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, i32)
10575 declare void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
10577 define void @test_vsuxseg7_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
10578 ; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i32:
10579 ; CHECK: # %bb.0: # %entry
10580 ; CHECK-NEXT: vmv1r.v v10, v8
10581 ; CHECK-NEXT: vmv1r.v v11, v8
10582 ; CHECK-NEXT: vmv1r.v v12, v8
10583 ; CHECK-NEXT: vmv1r.v v13, v8
10584 ; CHECK-NEXT: vmv1r.v v14, v8
10585 ; CHECK-NEXT: vmv1r.v v15, v8
10586 ; CHECK-NEXT: vmv1r.v v16, v8
10587 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10588 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
10591 tail call void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
10595 define void @test_vsuxseg7_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10596 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i32:
10597 ; CHECK: # %bb.0: # %entry
10598 ; CHECK-NEXT: vmv1r.v v10, v8
10599 ; CHECK-NEXT: vmv1r.v v11, v8
10600 ; CHECK-NEXT: vmv1r.v v12, v8
10601 ; CHECK-NEXT: vmv1r.v v13, v8
10602 ; CHECK-NEXT: vmv1r.v v14, v8
10603 ; CHECK-NEXT: vmv1r.v v15, v8
10604 ; CHECK-NEXT: vmv1r.v v16, v8
10605 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10606 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
10609 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
10613 declare void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, i32)
10614 declare void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10616 define void @test_vsuxseg7_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10617 ; CHECK-LABEL: test_vsuxseg7_nxv1f32_nxv1i16:
10618 ; CHECK: # %bb.0: # %entry
10619 ; CHECK-NEXT: vmv1r.v v10, v8
10620 ; CHECK-NEXT: vmv1r.v v11, v8
10621 ; CHECK-NEXT: vmv1r.v v12, v8
10622 ; CHECK-NEXT: vmv1r.v v13, v8
10623 ; CHECK-NEXT: vmv1r.v v14, v8
10624 ; CHECK-NEXT: vmv1r.v v15, v8
10625 ; CHECK-NEXT: vmv1r.v v16, v8
10626 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10627 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
10630 tail call void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10634 define void @test_vsuxseg7_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10635 ; CHECK-LABEL: test_vsuxseg7_mask_nxv1f32_nxv1i16:
10636 ; CHECK: # %bb.0: # %entry
10637 ; CHECK-NEXT: vmv1r.v v10, v8
10638 ; CHECK-NEXT: vmv1r.v v11, v8
10639 ; CHECK-NEXT: vmv1r.v v12, v8
10640 ; CHECK-NEXT: vmv1r.v v13, v8
10641 ; CHECK-NEXT: vmv1r.v v14, v8
10642 ; CHECK-NEXT: vmv1r.v v15, v8
10643 ; CHECK-NEXT: vmv1r.v v16, v8
10644 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10645 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
10648 tail call void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10652 declare void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, i32)
10653 declare void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i8(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i8>, <vscale x 1 x i1>, i32)
10655 define void @test_vsuxseg8_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl) {
10656 ; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i8:
10657 ; CHECK: # %bb.0: # %entry
10658 ; CHECK-NEXT: vmv1r.v v10, v8
10659 ; CHECK-NEXT: vmv1r.v v11, v8
10660 ; CHECK-NEXT: vmv1r.v v12, v8
10661 ; CHECK-NEXT: vmv1r.v v13, v8
10662 ; CHECK-NEXT: vmv1r.v v14, v8
10663 ; CHECK-NEXT: vmv1r.v v15, v8
10664 ; CHECK-NEXT: vmv1r.v v16, v8
10665 ; CHECK-NEXT: vmv1r.v v17, v8
10666 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10667 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
10670 tail call void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, i32 %vl)
10674 define void @test_vsuxseg8_mask_nxv1f32_nxv1i8(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10675 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i8:
10676 ; CHECK: # %bb.0: # %entry
10677 ; CHECK-NEXT: vmv1r.v v10, v8
10678 ; CHECK-NEXT: vmv1r.v v11, v8
10679 ; CHECK-NEXT: vmv1r.v v12, v8
10680 ; CHECK-NEXT: vmv1r.v v13, v8
10681 ; CHECK-NEXT: vmv1r.v v14, v8
10682 ; CHECK-NEXT: vmv1r.v v15, v8
10683 ; CHECK-NEXT: vmv1r.v v16, v8
10684 ; CHECK-NEXT: vmv1r.v v17, v8
10685 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10686 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
10689 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i8(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i32 %vl)
10693 declare void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, i32)
10694 declare void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i32(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i32>, <vscale x 1 x i1>, i32)
10696 define void @test_vsuxseg8_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl) {
10697 ; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i32:
10698 ; CHECK: # %bb.0: # %entry
10699 ; CHECK-NEXT: vmv1r.v v10, v8
10700 ; CHECK-NEXT: vmv1r.v v11, v8
10701 ; CHECK-NEXT: vmv1r.v v12, v8
10702 ; CHECK-NEXT: vmv1r.v v13, v8
10703 ; CHECK-NEXT: vmv1r.v v14, v8
10704 ; CHECK-NEXT: vmv1r.v v15, v8
10705 ; CHECK-NEXT: vmv1r.v v16, v8
10706 ; CHECK-NEXT: vmv1r.v v17, v8
10707 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10708 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
10711 tail call void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, i32 %vl)
10715 define void @test_vsuxseg8_mask_nxv1f32_nxv1i32(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10716 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i32:
10717 ; CHECK: # %bb.0: # %entry
10718 ; CHECK-NEXT: vmv1r.v v10, v8
10719 ; CHECK-NEXT: vmv1r.v v11, v8
10720 ; CHECK-NEXT: vmv1r.v v12, v8
10721 ; CHECK-NEXT: vmv1r.v v13, v8
10722 ; CHECK-NEXT: vmv1r.v v14, v8
10723 ; CHECK-NEXT: vmv1r.v v15, v8
10724 ; CHECK-NEXT: vmv1r.v v16, v8
10725 ; CHECK-NEXT: vmv1r.v v17, v8
10726 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10727 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
10730 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i32(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i32 %vl)
10734 declare void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, i32)
10735 declare void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i16(<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>,<vscale x 1 x float>, ptr, <vscale x 1 x i16>, <vscale x 1 x i1>, i32)
10737 define void @test_vsuxseg8_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl) {
10738 ; CHECK-LABEL: test_vsuxseg8_nxv1f32_nxv1i16:
10739 ; CHECK: # %bb.0: # %entry
10740 ; CHECK-NEXT: vmv1r.v v10, v8
10741 ; CHECK-NEXT: vmv1r.v v11, v8
10742 ; CHECK-NEXT: vmv1r.v v12, v8
10743 ; CHECK-NEXT: vmv1r.v v13, v8
10744 ; CHECK-NEXT: vmv1r.v v14, v8
10745 ; CHECK-NEXT: vmv1r.v v15, v8
10746 ; CHECK-NEXT: vmv1r.v v16, v8
10747 ; CHECK-NEXT: vmv1r.v v17, v8
10748 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10749 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
10752 tail call void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, i32 %vl)
10756 define void @test_vsuxseg8_mask_nxv1f32_nxv1i16(<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl) {
10757 ; CHECK-LABEL: test_vsuxseg8_mask_nxv1f32_nxv1i16:
10758 ; CHECK: # %bb.0: # %entry
10759 ; CHECK-NEXT: vmv1r.v v10, v8
10760 ; CHECK-NEXT: vmv1r.v v11, v8
10761 ; CHECK-NEXT: vmv1r.v v12, v8
10762 ; CHECK-NEXT: vmv1r.v v13, v8
10763 ; CHECK-NEXT: vmv1r.v v14, v8
10764 ; CHECK-NEXT: vmv1r.v v15, v8
10765 ; CHECK-NEXT: vmv1r.v v16, v8
10766 ; CHECK-NEXT: vmv1r.v v17, v8
10767 ; CHECK-NEXT: vsetvli zero, a1, e32, mf2, ta, ma
10768 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
10771 tail call void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i16(<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val,<vscale x 1 x float> %val, ptr %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i32 %vl)
10775 declare void @llvm.riscv.vsuxseg2.nxv8f16.nxv8i16(<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i16>, i32)
10776 declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i16(<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
10778 define void @test_vsuxseg2_nxv8f16_nxv8i16(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
10779 ; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i16:
10780 ; CHECK: # %bb.0: # %entry
10781 ; CHECK-NEXT: vmv2r.v v12, v10
10782 ; CHECK-NEXT: vmv2r.v v10, v8
10783 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10784 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
10787 tail call void @llvm.riscv.vsuxseg2.nxv8f16.nxv8i16(<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
10791 define void @test_vsuxseg2_mask_nxv8f16_nxv8i16(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
10792 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i16:
10793 ; CHECK: # %bb.0: # %entry
10794 ; CHECK-NEXT: vmv2r.v v12, v10
10795 ; CHECK-NEXT: vmv2r.v v10, v8
10796 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10797 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
10800 tail call void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i16(<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
10804 declare void @llvm.riscv.vsuxseg2.nxv8f16.nxv8i8(<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i8>, i32)
10805 declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i8(<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
10807 define void @test_vsuxseg2_nxv8f16_nxv8i8(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
10808 ; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i8:
10809 ; CHECK: # %bb.0: # %entry
10810 ; CHECK-NEXT: vmv1r.v v12, v10
10811 ; CHECK-NEXT: vmv2r.v v10, v8
10812 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10813 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12
10816 tail call void @llvm.riscv.vsuxseg2.nxv8f16.nxv8i8(<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
10820 define void @test_vsuxseg2_mask_nxv8f16_nxv8i8(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
10821 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i8:
10822 ; CHECK: # %bb.0: # %entry
10823 ; CHECK-NEXT: vmv1r.v v12, v10
10824 ; CHECK-NEXT: vmv2r.v v10, v8
10825 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10826 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12, v0.t
10829 tail call void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i8(<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
10833 declare void @llvm.riscv.vsuxseg2.nxv8f16.nxv8i32(<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i32>, i32)
10834 declare void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i32(<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
10836 define void @test_vsuxseg2_nxv8f16_nxv8i32(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
10837 ; CHECK-LABEL: test_vsuxseg2_nxv8f16_nxv8i32:
10838 ; CHECK: # %bb.0: # %entry
10839 ; CHECK-NEXT: vmv2r.v v10, v8
10840 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10841 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
10844 tail call void @llvm.riscv.vsuxseg2.nxv8f16.nxv8i32(<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
10848 define void @test_vsuxseg2_mask_nxv8f16_nxv8i32(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
10849 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f16_nxv8i32:
10850 ; CHECK: # %bb.0: # %entry
10851 ; CHECK-NEXT: vmv2r.v v10, v8
10852 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10853 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
10856 tail call void @llvm.riscv.vsuxseg2.mask.nxv8f16.nxv8i32(<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
10860 declare void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i16>, i32)
10861 declare void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
10863 define void @test_vsuxseg3_nxv8f16_nxv8i16(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
10864 ; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i16:
10865 ; CHECK: # %bb.0: # %entry
10866 ; CHECK-NEXT: vmv2r.v v12, v8
10867 ; CHECK-NEXT: vmv2r.v v14, v8
10868 ; CHECK-NEXT: vmv2r.v v16, v8
10869 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10870 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
10873 tail call void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i16(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
10877 define void @test_vsuxseg3_mask_nxv8f16_nxv8i16(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
10878 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i16:
10879 ; CHECK: # %bb.0: # %entry
10880 ; CHECK-NEXT: vmv2r.v v12, v8
10881 ; CHECK-NEXT: vmv2r.v v14, v8
10882 ; CHECK-NEXT: vmv2r.v v16, v8
10883 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10884 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
10887 tail call void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i16(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
10891 declare void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i8(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i8>, i32)
10892 declare void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i8(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
10894 define void @test_vsuxseg3_nxv8f16_nxv8i8(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
10895 ; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i8:
10896 ; CHECK: # %bb.0: # %entry
10897 ; CHECK-NEXT: vmv2r.v v12, v8
10898 ; CHECK-NEXT: vmv2r.v v14, v8
10899 ; CHECK-NEXT: vmv2r.v v16, v8
10900 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10901 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
10904 tail call void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i8(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
10908 define void @test_vsuxseg3_mask_nxv8f16_nxv8i8(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
10909 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i8:
10910 ; CHECK: # %bb.0: # %entry
10911 ; CHECK-NEXT: vmv2r.v v12, v8
10912 ; CHECK-NEXT: vmv2r.v v14, v8
10913 ; CHECK-NEXT: vmv2r.v v16, v8
10914 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10915 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
10918 tail call void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i8(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
10922 declare void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i32(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i32>, i32)
10923 declare void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i32(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
10925 define void @test_vsuxseg3_nxv8f16_nxv8i32(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
10926 ; CHECK-LABEL: test_vsuxseg3_nxv8f16_nxv8i32:
10927 ; CHECK: # %bb.0: # %entry
10928 ; CHECK-NEXT: vmv2r.v v10, v8
10929 ; CHECK-NEXT: vmv4r.v v16, v12
10930 ; CHECK-NEXT: vmv2r.v v12, v8
10931 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10932 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16
10935 tail call void @llvm.riscv.vsuxseg3.nxv8f16.nxv8i32(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
10939 define void @test_vsuxseg3_mask_nxv8f16_nxv8i32(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
10940 ; CHECK-LABEL: test_vsuxseg3_mask_nxv8f16_nxv8i32:
10941 ; CHECK: # %bb.0: # %entry
10942 ; CHECK-NEXT: vmv2r.v v10, v8
10943 ; CHECK-NEXT: vmv4r.v v16, v12
10944 ; CHECK-NEXT: vmv2r.v v12, v8
10945 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10946 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t
10949 tail call void @llvm.riscv.vsuxseg3.mask.nxv8f16.nxv8i32(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
10953 declare void @llvm.riscv.vsuxseg4.nxv8f16.nxv8i16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i16>, i32)
10954 declare void @llvm.riscv.vsuxseg4.mask.nxv8f16.nxv8i16(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
10956 define void @test_vsuxseg4_nxv8f16_nxv8i16(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
10957 ; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i16:
10958 ; CHECK: # %bb.0: # %entry
10959 ; CHECK-NEXT: vmv2r.v v12, v8
10960 ; CHECK-NEXT: vmv2r.v v14, v8
10961 ; CHECK-NEXT: vmv2r.v v16, v8
10962 ; CHECK-NEXT: vmv2r.v v18, v8
10963 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10964 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
10967 tail call void @llvm.riscv.vsuxseg4.nxv8f16.nxv8i16(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
10971 define void @test_vsuxseg4_mask_nxv8f16_nxv8i16(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
10972 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i16:
10973 ; CHECK: # %bb.0: # %entry
10974 ; CHECK-NEXT: vmv2r.v v12, v8
10975 ; CHECK-NEXT: vmv2r.v v14, v8
10976 ; CHECK-NEXT: vmv2r.v v16, v8
10977 ; CHECK-NEXT: vmv2r.v v18, v8
10978 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10979 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
10982 tail call void @llvm.riscv.vsuxseg4.mask.nxv8f16.nxv8i16(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
10986 declare void @llvm.riscv.vsuxseg4.nxv8f16.nxv8i8(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i8>, i32)
10987 declare void @llvm.riscv.vsuxseg4.mask.nxv8f16.nxv8i8(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
10989 define void @test_vsuxseg4_nxv8f16_nxv8i8(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
10990 ; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i8:
10991 ; CHECK: # %bb.0: # %entry
10992 ; CHECK-NEXT: vmv2r.v v12, v8
10993 ; CHECK-NEXT: vmv2r.v v14, v8
10994 ; CHECK-NEXT: vmv2r.v v16, v8
10995 ; CHECK-NEXT: vmv2r.v v18, v8
10996 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
10997 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
11000 tail call void @llvm.riscv.vsuxseg4.nxv8f16.nxv8i8(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
11004 define void @test_vsuxseg4_mask_nxv8f16_nxv8i8(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
11005 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i8:
11006 ; CHECK: # %bb.0: # %entry
11007 ; CHECK-NEXT: vmv2r.v v12, v8
11008 ; CHECK-NEXT: vmv2r.v v14, v8
11009 ; CHECK-NEXT: vmv2r.v v16, v8
11010 ; CHECK-NEXT: vmv2r.v v18, v8
11011 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
11012 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
11015 tail call void @llvm.riscv.vsuxseg4.mask.nxv8f16.nxv8i8(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
11019 declare void @llvm.riscv.vsuxseg4.nxv8f16.nxv8i32(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i32>, i32)
11020 declare void @llvm.riscv.vsuxseg4.mask.nxv8f16.nxv8i32(<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>,<vscale x 8 x half>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
11022 define void @test_vsuxseg4_nxv8f16_nxv8i32(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
11023 ; CHECK-LABEL: test_vsuxseg4_nxv8f16_nxv8i32:
11024 ; CHECK: # %bb.0: # %entry
11025 ; CHECK-NEXT: vmv2r.v v16, v8
11026 ; CHECK-NEXT: vmv2r.v v18, v8
11027 ; CHECK-NEXT: vmv2r.v v20, v8
11028 ; CHECK-NEXT: vmv2r.v v22, v8
11029 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
11030 ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12
11033 tail call void @llvm.riscv.vsuxseg4.nxv8f16.nxv8i32(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
11037 define void @test_vsuxseg4_mask_nxv8f16_nxv8i32(<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
11038 ; CHECK-LABEL: test_vsuxseg4_mask_nxv8f16_nxv8i32:
11039 ; CHECK: # %bb.0: # %entry
11040 ; CHECK-NEXT: vmv2r.v v16, v8
11041 ; CHECK-NEXT: vmv2r.v v18, v8
11042 ; CHECK-NEXT: vmv2r.v v20, v8
11043 ; CHECK-NEXT: vmv2r.v v22, v8
11044 ; CHECK-NEXT: vsetvli zero, a1, e16, m2, ta, ma
11045 ; CHECK-NEXT: vsuxseg4ei32.v v16, (a0), v12, v0.t
11048 tail call void @llvm.riscv.vsuxseg4.mask.nxv8f16.nxv8i32(<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val,<vscale x 8 x half> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
11052 declare void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i16(<vscale x 8 x float>,<vscale x 8 x float>, ptr, <vscale x 8 x i16>, i32)
11053 declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i16(<vscale x 8 x float>,<vscale x 8 x float>, ptr, <vscale x 8 x i16>, <vscale x 8 x i1>, i32)
11055 define void @test_vsuxseg2_nxv8f32_nxv8i16(<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl) {
11056 ; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i16:
11057 ; CHECK: # %bb.0: # %entry
11058 ; CHECK-NEXT: vmv2r.v v16, v12
11059 ; CHECK-NEXT: vmv4r.v v12, v8
11060 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
11061 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16
11064 tail call void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i16(<vscale x 8 x float> %val,<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i16> %index, i32 %vl)
11068 define void @test_vsuxseg2_mask_nxv8f32_nxv8i16(<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl) {
11069 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i16:
11070 ; CHECK: # %bb.0: # %entry
11071 ; CHECK-NEXT: vmv2r.v v16, v12
11072 ; CHECK-NEXT: vmv4r.v v12, v8
11073 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
11074 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v16, v0.t
11077 tail call void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i16(<vscale x 8 x float> %val,<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i16> %index, <vscale x 8 x i1> %mask, i32 %vl)
11081 declare void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i8(<vscale x 8 x float>,<vscale x 8 x float>, ptr, <vscale x 8 x i8>, i32)
11082 declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i8(<vscale x 8 x float>,<vscale x 8 x float>, ptr, <vscale x 8 x i8>, <vscale x 8 x i1>, i32)
11084 define void @test_vsuxseg2_nxv8f32_nxv8i8(<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl) {
11085 ; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i8:
11086 ; CHECK: # %bb.0: # %entry
11087 ; CHECK-NEXT: vmv1r.v v16, v12
11088 ; CHECK-NEXT: vmv4r.v v12, v8
11089 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
11090 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16
11093 tail call void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i8(<vscale x 8 x float> %val,<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i8> %index, i32 %vl)
11097 define void @test_vsuxseg2_mask_nxv8f32_nxv8i8(<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl) {
11098 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i8:
11099 ; CHECK: # %bb.0: # %entry
11100 ; CHECK-NEXT: vmv1r.v v16, v12
11101 ; CHECK-NEXT: vmv4r.v v12, v8
11102 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
11103 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v16, v0.t
11106 tail call void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i8(<vscale x 8 x float> %val,<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i8> %index, <vscale x 8 x i1> %mask, i32 %vl)
11110 declare void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i32(<vscale x 8 x float>,<vscale x 8 x float>, ptr, <vscale x 8 x i32>, i32)
11111 declare void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i32(<vscale x 8 x float>,<vscale x 8 x float>, ptr, <vscale x 8 x i32>, <vscale x 8 x i1>, i32)
11113 define void @test_vsuxseg2_nxv8f32_nxv8i32(<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl) {
11114 ; CHECK-LABEL: test_vsuxseg2_nxv8f32_nxv8i32:
11115 ; CHECK: # %bb.0: # %entry
11116 ; CHECK-NEXT: vmv4r.v v16, v12
11117 ; CHECK-NEXT: vmv4r.v v12, v8
11118 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
11119 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
11122 tail call void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i32(<vscale x 8 x float> %val,<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i32> %index, i32 %vl)
11126 define void @test_vsuxseg2_mask_nxv8f32_nxv8i32(<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl) {
11127 ; CHECK-LABEL: test_vsuxseg2_mask_nxv8f32_nxv8i32:
11128 ; CHECK: # %bb.0: # %entry
11129 ; CHECK-NEXT: vmv4r.v v16, v12
11130 ; CHECK-NEXT: vmv4r.v v12, v8
11131 ; CHECK-NEXT: vsetvli zero, a1, e32, m4, ta, ma
11132 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
11135 tail call void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i32(<vscale x 8 x float> %val,<vscale x 8 x float> %val, ptr %base, <vscale x 8 x i32> %index, <vscale x 8 x i1> %mask, i32 %vl)
11139 declare void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i32(<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i32>, i32)
11140 declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i32(<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
11142 define void @test_vsuxseg2_nxv2f64_nxv2i32(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
11143 ; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i32:
11144 ; CHECK: # %bb.0: # %entry
11145 ; CHECK-NEXT: vmv1r.v v12, v10
11146 ; CHECK-NEXT: vmv2r.v v10, v8
11147 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11148 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
11151 tail call void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i32(<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
11155 define void @test_vsuxseg2_mask_nxv2f64_nxv2i32(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11156 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i32:
11157 ; CHECK: # %bb.0: # %entry
11158 ; CHECK-NEXT: vmv1r.v v12, v10
11159 ; CHECK-NEXT: vmv2r.v v10, v8
11160 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11161 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
11164 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i32(<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
11168 declare void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i8(<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i8>, i32)
11169 declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i8(<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
11171 define void @test_vsuxseg2_nxv2f64_nxv2i8(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
11172 ; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i8:
11173 ; CHECK: # %bb.0: # %entry
11174 ; CHECK-NEXT: vmv1r.v v12, v10
11175 ; CHECK-NEXT: vmv2r.v v10, v8
11176 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11177 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12
11180 tail call void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i8(<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
11184 define void @test_vsuxseg2_mask_nxv2f64_nxv2i8(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11185 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i8:
11186 ; CHECK: # %bb.0: # %entry
11187 ; CHECK-NEXT: vmv1r.v v12, v10
11188 ; CHECK-NEXT: vmv2r.v v10, v8
11189 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11190 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12, v0.t
11193 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i8(<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
11197 declare void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i16(<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i16>, i32)
11198 declare void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i16(<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
11200 define void @test_vsuxseg2_nxv2f64_nxv2i16(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
11201 ; CHECK-LABEL: test_vsuxseg2_nxv2f64_nxv2i16:
11202 ; CHECK: # %bb.0: # %entry
11203 ; CHECK-NEXT: vmv1r.v v12, v10
11204 ; CHECK-NEXT: vmv2r.v v10, v8
11205 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11206 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
11209 tail call void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i16(<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
11213 define void @test_vsuxseg2_mask_nxv2f64_nxv2i16(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11214 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f64_nxv2i16:
11215 ; CHECK: # %bb.0: # %entry
11216 ; CHECK-NEXT: vmv1r.v v12, v10
11217 ; CHECK-NEXT: vmv2r.v v10, v8
11218 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11219 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
11222 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i16(<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
11226 declare void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i32(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i32>, i32)
11227 declare void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i32(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
11229 define void @test_vsuxseg3_nxv2f64_nxv2i32(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
11230 ; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i32:
11231 ; CHECK: # %bb.0: # %entry
11232 ; CHECK-NEXT: vmv2r.v v12, v8
11233 ; CHECK-NEXT: vmv2r.v v14, v8
11234 ; CHECK-NEXT: vmv2r.v v16, v8
11235 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11236 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
11239 tail call void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i32(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
11243 define void @test_vsuxseg3_mask_nxv2f64_nxv2i32(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11244 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i32:
11245 ; CHECK: # %bb.0: # %entry
11246 ; CHECK-NEXT: vmv2r.v v12, v8
11247 ; CHECK-NEXT: vmv2r.v v14, v8
11248 ; CHECK-NEXT: vmv2r.v v16, v8
11249 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11250 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
11253 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i32(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
11257 declare void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i8(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i8>, i32)
11258 declare void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i8(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
11260 define void @test_vsuxseg3_nxv2f64_nxv2i8(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
11261 ; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i8:
11262 ; CHECK: # %bb.0: # %entry
11263 ; CHECK-NEXT: vmv2r.v v12, v8
11264 ; CHECK-NEXT: vmv2r.v v14, v8
11265 ; CHECK-NEXT: vmv2r.v v16, v8
11266 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11267 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
11270 tail call void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i8(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
11274 define void @test_vsuxseg3_mask_nxv2f64_nxv2i8(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11275 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i8:
11276 ; CHECK: # %bb.0: # %entry
11277 ; CHECK-NEXT: vmv2r.v v12, v8
11278 ; CHECK-NEXT: vmv2r.v v14, v8
11279 ; CHECK-NEXT: vmv2r.v v16, v8
11280 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11281 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
11284 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i8(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
11288 declare void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i16(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i16>, i32)
11289 declare void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i16(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
11291 define void @test_vsuxseg3_nxv2f64_nxv2i16(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
11292 ; CHECK-LABEL: test_vsuxseg3_nxv2f64_nxv2i16:
11293 ; CHECK: # %bb.0: # %entry
11294 ; CHECK-NEXT: vmv2r.v v12, v8
11295 ; CHECK-NEXT: vmv2r.v v14, v8
11296 ; CHECK-NEXT: vmv2r.v v16, v8
11297 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11298 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
11301 tail call void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i16(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
11305 define void @test_vsuxseg3_mask_nxv2f64_nxv2i16(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11306 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f64_nxv2i16:
11307 ; CHECK: # %bb.0: # %entry
11308 ; CHECK-NEXT: vmv2r.v v12, v8
11309 ; CHECK-NEXT: vmv2r.v v14, v8
11310 ; CHECK-NEXT: vmv2r.v v16, v8
11311 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11312 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
11315 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i16(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
11319 declare void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i32(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i32>, i32)
11320 declare void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i32(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
11322 define void @test_vsuxseg4_nxv2f64_nxv2i32(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
11323 ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i32:
11324 ; CHECK: # %bb.0: # %entry
11325 ; CHECK-NEXT: vmv2r.v v12, v8
11326 ; CHECK-NEXT: vmv2r.v v14, v8
11327 ; CHECK-NEXT: vmv2r.v v16, v8
11328 ; CHECK-NEXT: vmv2r.v v18, v8
11329 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11330 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
11333 tail call void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i32(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
11337 define void @test_vsuxseg4_mask_nxv2f64_nxv2i32(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11338 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i32:
11339 ; CHECK: # %bb.0: # %entry
11340 ; CHECK-NEXT: vmv2r.v v12, v8
11341 ; CHECK-NEXT: vmv2r.v v14, v8
11342 ; CHECK-NEXT: vmv2r.v v16, v8
11343 ; CHECK-NEXT: vmv2r.v v18, v8
11344 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11345 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
11348 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i32(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
11352 declare void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i8(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i8>, i32)
11353 declare void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i8(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
11355 define void @test_vsuxseg4_nxv2f64_nxv2i8(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
11356 ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i8:
11357 ; CHECK: # %bb.0: # %entry
11358 ; CHECK-NEXT: vmv2r.v v12, v8
11359 ; CHECK-NEXT: vmv2r.v v14, v8
11360 ; CHECK-NEXT: vmv2r.v v16, v8
11361 ; CHECK-NEXT: vmv2r.v v18, v8
11362 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11363 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
11366 tail call void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i8(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
11370 define void @test_vsuxseg4_mask_nxv2f64_nxv2i8(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11371 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i8:
11372 ; CHECK: # %bb.0: # %entry
11373 ; CHECK-NEXT: vmv2r.v v12, v8
11374 ; CHECK-NEXT: vmv2r.v v14, v8
11375 ; CHECK-NEXT: vmv2r.v v16, v8
11376 ; CHECK-NEXT: vmv2r.v v18, v8
11377 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11378 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
11381 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i8(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
11385 declare void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i16(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i16>, i32)
11386 declare void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i16(<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>,<vscale x 2 x double>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
11388 define void @test_vsuxseg4_nxv2f64_nxv2i16(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
11389 ; CHECK-LABEL: test_vsuxseg4_nxv2f64_nxv2i16:
11390 ; CHECK: # %bb.0: # %entry
11391 ; CHECK-NEXT: vmv2r.v v12, v8
11392 ; CHECK-NEXT: vmv2r.v v14, v8
11393 ; CHECK-NEXT: vmv2r.v v16, v8
11394 ; CHECK-NEXT: vmv2r.v v18, v8
11395 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11396 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
11399 tail call void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i16(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
11403 define void @test_vsuxseg4_mask_nxv2f64_nxv2i16(<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
11404 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f64_nxv2i16:
11405 ; CHECK: # %bb.0: # %entry
11406 ; CHECK-NEXT: vmv2r.v v12, v8
11407 ; CHECK-NEXT: vmv2r.v v14, v8
11408 ; CHECK-NEXT: vmv2r.v v16, v8
11409 ; CHECK-NEXT: vmv2r.v v18, v8
11410 ; CHECK-NEXT: vsetvli zero, a1, e64, m2, ta, ma
11411 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
11414 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i16(<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val,<vscale x 2 x double> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
11418 declare void @llvm.riscv.vsuxseg2.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, i32)
11419 declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
11421 define void @test_vsuxseg2_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
11422 ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i16:
11423 ; CHECK: # %bb.0: # %entry
11424 ; CHECK-NEXT: vmv1r.v v10, v9
11425 ; CHECK-NEXT: vmv1r.v v9, v8
11426 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11427 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
11430 tail call void @llvm.riscv.vsuxseg2.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
11434 define void @test_vsuxseg2_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11435 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i16:
11436 ; CHECK: # %bb.0: # %entry
11437 ; CHECK-NEXT: vmv1r.v v10, v9
11438 ; CHECK-NEXT: vmv1r.v v9, v8
11439 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11440 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
11443 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
11447 declare void @llvm.riscv.vsuxseg2.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, i32)
11448 declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
11450 define void @test_vsuxseg2_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
11451 ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i8:
11452 ; CHECK: # %bb.0: # %entry
11453 ; CHECK-NEXT: vmv1r.v v10, v9
11454 ; CHECK-NEXT: vmv1r.v v9, v8
11455 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11456 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
11459 tail call void @llvm.riscv.vsuxseg2.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
11463 define void @test_vsuxseg2_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11464 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i8:
11465 ; CHECK: # %bb.0: # %entry
11466 ; CHECK-NEXT: vmv1r.v v10, v9
11467 ; CHECK-NEXT: vmv1r.v v9, v8
11468 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11469 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
11472 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
11476 declare void @llvm.riscv.vsuxseg2.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, i32)
11477 declare void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
11479 define void @test_vsuxseg2_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
11480 ; CHECK-LABEL: test_vsuxseg2_nxv4f16_nxv4i32:
11481 ; CHECK: # %bb.0: # %entry
11482 ; CHECK-NEXT: vmv1r.v v9, v8
11483 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11484 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
11487 tail call void @llvm.riscv.vsuxseg2.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
11491 define void @test_vsuxseg2_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11492 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f16_nxv4i32:
11493 ; CHECK: # %bb.0: # %entry
11494 ; CHECK-NEXT: vmv1r.v v9, v8
11495 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11496 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
11499 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
11503 declare void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, i32)
11504 declare void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
11506 define void @test_vsuxseg3_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
11507 ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i16:
11508 ; CHECK: # %bb.0: # %entry
11509 ; CHECK-NEXT: vmv1r.v v10, v8
11510 ; CHECK-NEXT: vmv1r.v v11, v8
11511 ; CHECK-NEXT: vmv1r.v v12, v8
11512 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11513 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
11516 tail call void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
11520 define void @test_vsuxseg3_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11521 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i16:
11522 ; CHECK: # %bb.0: # %entry
11523 ; CHECK-NEXT: vmv1r.v v10, v8
11524 ; CHECK-NEXT: vmv1r.v v11, v8
11525 ; CHECK-NEXT: vmv1r.v v12, v8
11526 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11527 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
11530 tail call void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
11534 declare void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, i32)
11535 declare void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
11537 define void @test_vsuxseg3_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
11538 ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i8:
11539 ; CHECK: # %bb.0: # %entry
11540 ; CHECK-NEXT: vmv1r.v v10, v8
11541 ; CHECK-NEXT: vmv1r.v v11, v8
11542 ; CHECK-NEXT: vmv1r.v v12, v8
11543 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11544 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
11547 tail call void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
11551 define void @test_vsuxseg3_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11552 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i8:
11553 ; CHECK: # %bb.0: # %entry
11554 ; CHECK-NEXT: vmv1r.v v10, v8
11555 ; CHECK-NEXT: vmv1r.v v11, v8
11556 ; CHECK-NEXT: vmv1r.v v12, v8
11557 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11558 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
11561 tail call void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
11565 declare void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, i32)
11566 declare void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
11568 define void @test_vsuxseg3_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
11569 ; CHECK-LABEL: test_vsuxseg3_nxv4f16_nxv4i32:
11570 ; CHECK: # %bb.0: # %entry
11571 ; CHECK-NEXT: vmv1r.v v9, v8
11572 ; CHECK-NEXT: vmv2r.v v12, v10
11573 ; CHECK-NEXT: vmv1r.v v10, v8
11574 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11575 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12
11578 tail call void @llvm.riscv.vsuxseg3.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
11582 define void @test_vsuxseg3_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11583 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f16_nxv4i32:
11584 ; CHECK: # %bb.0: # %entry
11585 ; CHECK-NEXT: vmv1r.v v9, v8
11586 ; CHECK-NEXT: vmv2r.v v12, v10
11587 ; CHECK-NEXT: vmv1r.v v10, v8
11588 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11589 ; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v12, v0.t
11592 tail call void @llvm.riscv.vsuxseg3.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
11596 declare void @llvm.riscv.vsuxseg4.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, i32)
11597 declare void @llvm.riscv.vsuxseg4.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
11599 define void @test_vsuxseg4_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
11600 ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i16:
11601 ; CHECK: # %bb.0: # %entry
11602 ; CHECK-NEXT: vmv1r.v v10, v8
11603 ; CHECK-NEXT: vmv1r.v v11, v8
11604 ; CHECK-NEXT: vmv1r.v v12, v8
11605 ; CHECK-NEXT: vmv1r.v v13, v8
11606 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11607 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
11610 tail call void @llvm.riscv.vsuxseg4.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
11614 define void @test_vsuxseg4_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11615 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i16:
11616 ; CHECK: # %bb.0: # %entry
11617 ; CHECK-NEXT: vmv1r.v v10, v8
11618 ; CHECK-NEXT: vmv1r.v v11, v8
11619 ; CHECK-NEXT: vmv1r.v v12, v8
11620 ; CHECK-NEXT: vmv1r.v v13, v8
11621 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11622 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
11625 tail call void @llvm.riscv.vsuxseg4.mask.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
11629 declare void @llvm.riscv.vsuxseg4.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, i32)
11630 declare void @llvm.riscv.vsuxseg4.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
11632 define void @test_vsuxseg4_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
11633 ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i8:
11634 ; CHECK: # %bb.0: # %entry
11635 ; CHECK-NEXT: vmv1r.v v10, v8
11636 ; CHECK-NEXT: vmv1r.v v11, v8
11637 ; CHECK-NEXT: vmv1r.v v12, v8
11638 ; CHECK-NEXT: vmv1r.v v13, v8
11639 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11640 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
11643 tail call void @llvm.riscv.vsuxseg4.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
11647 define void @test_vsuxseg4_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11648 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i8:
11649 ; CHECK: # %bb.0: # %entry
11650 ; CHECK-NEXT: vmv1r.v v10, v8
11651 ; CHECK-NEXT: vmv1r.v v11, v8
11652 ; CHECK-NEXT: vmv1r.v v12, v8
11653 ; CHECK-NEXT: vmv1r.v v13, v8
11654 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11655 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
11658 tail call void @llvm.riscv.vsuxseg4.mask.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
11662 declare void @llvm.riscv.vsuxseg4.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, i32)
11663 declare void @llvm.riscv.vsuxseg4.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
11665 define void @test_vsuxseg4_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
11666 ; CHECK-LABEL: test_vsuxseg4_nxv4f16_nxv4i32:
11667 ; CHECK: # %bb.0: # %entry
11668 ; CHECK-NEXT: vmv1r.v v12, v8
11669 ; CHECK-NEXT: vmv1r.v v13, v8
11670 ; CHECK-NEXT: vmv1r.v v14, v8
11671 ; CHECK-NEXT: vmv1r.v v15, v8
11672 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11673 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
11676 tail call void @llvm.riscv.vsuxseg4.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
11680 define void @test_vsuxseg4_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11681 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f16_nxv4i32:
11682 ; CHECK: # %bb.0: # %entry
11683 ; CHECK-NEXT: vmv1r.v v12, v8
11684 ; CHECK-NEXT: vmv1r.v v13, v8
11685 ; CHECK-NEXT: vmv1r.v v14, v8
11686 ; CHECK-NEXT: vmv1r.v v15, v8
11687 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11688 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
11691 tail call void @llvm.riscv.vsuxseg4.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
11695 declare void @llvm.riscv.vsuxseg5.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, i32)
11696 declare void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
11698 define void @test_vsuxseg5_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
11699 ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i16:
11700 ; CHECK: # %bb.0: # %entry
11701 ; CHECK-NEXT: vmv1r.v v10, v8
11702 ; CHECK-NEXT: vmv1r.v v11, v8
11703 ; CHECK-NEXT: vmv1r.v v12, v8
11704 ; CHECK-NEXT: vmv1r.v v13, v8
11705 ; CHECK-NEXT: vmv1r.v v14, v8
11706 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11707 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
11710 tail call void @llvm.riscv.vsuxseg5.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
11714 define void @test_vsuxseg5_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11715 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i16:
11716 ; CHECK: # %bb.0: # %entry
11717 ; CHECK-NEXT: vmv1r.v v10, v8
11718 ; CHECK-NEXT: vmv1r.v v11, v8
11719 ; CHECK-NEXT: vmv1r.v v12, v8
11720 ; CHECK-NEXT: vmv1r.v v13, v8
11721 ; CHECK-NEXT: vmv1r.v v14, v8
11722 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11723 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
11726 tail call void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
11730 declare void @llvm.riscv.vsuxseg5.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, i32)
11731 declare void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
11733 define void @test_vsuxseg5_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
11734 ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i8:
11735 ; CHECK: # %bb.0: # %entry
11736 ; CHECK-NEXT: vmv1r.v v10, v8
11737 ; CHECK-NEXT: vmv1r.v v11, v8
11738 ; CHECK-NEXT: vmv1r.v v12, v8
11739 ; CHECK-NEXT: vmv1r.v v13, v8
11740 ; CHECK-NEXT: vmv1r.v v14, v8
11741 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11742 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
11745 tail call void @llvm.riscv.vsuxseg5.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
11749 define void @test_vsuxseg5_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11750 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i8:
11751 ; CHECK: # %bb.0: # %entry
11752 ; CHECK-NEXT: vmv1r.v v10, v8
11753 ; CHECK-NEXT: vmv1r.v v11, v8
11754 ; CHECK-NEXT: vmv1r.v v12, v8
11755 ; CHECK-NEXT: vmv1r.v v13, v8
11756 ; CHECK-NEXT: vmv1r.v v14, v8
11757 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11758 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
11761 tail call void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
11765 declare void @llvm.riscv.vsuxseg5.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, i32)
11766 declare void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
11768 define void @test_vsuxseg5_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
11769 ; CHECK-LABEL: test_vsuxseg5_nxv4f16_nxv4i32:
11770 ; CHECK: # %bb.0: # %entry
11771 ; CHECK-NEXT: vmv1r.v v12, v8
11772 ; CHECK-NEXT: vmv1r.v v13, v8
11773 ; CHECK-NEXT: vmv1r.v v14, v8
11774 ; CHECK-NEXT: vmv1r.v v15, v8
11775 ; CHECK-NEXT: vmv1r.v v16, v8
11776 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11777 ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10
11780 tail call void @llvm.riscv.vsuxseg5.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
11784 define void @test_vsuxseg5_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11785 ; CHECK-LABEL: test_vsuxseg5_mask_nxv4f16_nxv4i32:
11786 ; CHECK: # %bb.0: # %entry
11787 ; CHECK-NEXT: vmv1r.v v12, v8
11788 ; CHECK-NEXT: vmv1r.v v13, v8
11789 ; CHECK-NEXT: vmv1r.v v14, v8
11790 ; CHECK-NEXT: vmv1r.v v15, v8
11791 ; CHECK-NEXT: vmv1r.v v16, v8
11792 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11793 ; CHECK-NEXT: vsuxseg5ei32.v v12, (a0), v10, v0.t
11796 tail call void @llvm.riscv.vsuxseg5.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
11800 declare void @llvm.riscv.vsuxseg6.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, i32)
11801 declare void @llvm.riscv.vsuxseg6.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
11803 define void @test_vsuxseg6_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
11804 ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i16:
11805 ; CHECK: # %bb.0: # %entry
11806 ; CHECK-NEXT: vmv1r.v v10, v8
11807 ; CHECK-NEXT: vmv1r.v v11, v8
11808 ; CHECK-NEXT: vmv1r.v v12, v8
11809 ; CHECK-NEXT: vmv1r.v v13, v8
11810 ; CHECK-NEXT: vmv1r.v v14, v8
11811 ; CHECK-NEXT: vmv1r.v v15, v8
11812 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11813 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
11816 tail call void @llvm.riscv.vsuxseg6.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
11820 define void @test_vsuxseg6_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11821 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i16:
11822 ; CHECK: # %bb.0: # %entry
11823 ; CHECK-NEXT: vmv1r.v v10, v8
11824 ; CHECK-NEXT: vmv1r.v v11, v8
11825 ; CHECK-NEXT: vmv1r.v v12, v8
11826 ; CHECK-NEXT: vmv1r.v v13, v8
11827 ; CHECK-NEXT: vmv1r.v v14, v8
11828 ; CHECK-NEXT: vmv1r.v v15, v8
11829 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11830 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
11833 tail call void @llvm.riscv.vsuxseg6.mask.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
11837 declare void @llvm.riscv.vsuxseg6.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, i32)
11838 declare void @llvm.riscv.vsuxseg6.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
11840 define void @test_vsuxseg6_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
11841 ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i8:
11842 ; CHECK: # %bb.0: # %entry
11843 ; CHECK-NEXT: vmv1r.v v10, v8
11844 ; CHECK-NEXT: vmv1r.v v11, v8
11845 ; CHECK-NEXT: vmv1r.v v12, v8
11846 ; CHECK-NEXT: vmv1r.v v13, v8
11847 ; CHECK-NEXT: vmv1r.v v14, v8
11848 ; CHECK-NEXT: vmv1r.v v15, v8
11849 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11850 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
11853 tail call void @llvm.riscv.vsuxseg6.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
11857 define void @test_vsuxseg6_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11858 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i8:
11859 ; CHECK: # %bb.0: # %entry
11860 ; CHECK-NEXT: vmv1r.v v10, v8
11861 ; CHECK-NEXT: vmv1r.v v11, v8
11862 ; CHECK-NEXT: vmv1r.v v12, v8
11863 ; CHECK-NEXT: vmv1r.v v13, v8
11864 ; CHECK-NEXT: vmv1r.v v14, v8
11865 ; CHECK-NEXT: vmv1r.v v15, v8
11866 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11867 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
11870 tail call void @llvm.riscv.vsuxseg6.mask.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
11874 declare void @llvm.riscv.vsuxseg6.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, i32)
11875 declare void @llvm.riscv.vsuxseg6.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
11877 define void @test_vsuxseg6_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
11878 ; CHECK-LABEL: test_vsuxseg6_nxv4f16_nxv4i32:
11879 ; CHECK: # %bb.0: # %entry
11880 ; CHECK-NEXT: vmv1r.v v12, v8
11881 ; CHECK-NEXT: vmv1r.v v13, v8
11882 ; CHECK-NEXT: vmv1r.v v14, v8
11883 ; CHECK-NEXT: vmv1r.v v15, v8
11884 ; CHECK-NEXT: vmv1r.v v16, v8
11885 ; CHECK-NEXT: vmv1r.v v17, v8
11886 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11887 ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10
11890 tail call void @llvm.riscv.vsuxseg6.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
11894 define void @test_vsuxseg6_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11895 ; CHECK-LABEL: test_vsuxseg6_mask_nxv4f16_nxv4i32:
11896 ; CHECK: # %bb.0: # %entry
11897 ; CHECK-NEXT: vmv1r.v v12, v8
11898 ; CHECK-NEXT: vmv1r.v v13, v8
11899 ; CHECK-NEXT: vmv1r.v v14, v8
11900 ; CHECK-NEXT: vmv1r.v v15, v8
11901 ; CHECK-NEXT: vmv1r.v v16, v8
11902 ; CHECK-NEXT: vmv1r.v v17, v8
11903 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11904 ; CHECK-NEXT: vsuxseg6ei32.v v12, (a0), v10, v0.t
11907 tail call void @llvm.riscv.vsuxseg6.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
11911 declare void @llvm.riscv.vsuxseg7.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, i32)
11912 declare void @llvm.riscv.vsuxseg7.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
11914 define void @test_vsuxseg7_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
11915 ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i16:
11916 ; CHECK: # %bb.0: # %entry
11917 ; CHECK-NEXT: vmv1r.v v10, v8
11918 ; CHECK-NEXT: vmv1r.v v11, v8
11919 ; CHECK-NEXT: vmv1r.v v12, v8
11920 ; CHECK-NEXT: vmv1r.v v13, v8
11921 ; CHECK-NEXT: vmv1r.v v14, v8
11922 ; CHECK-NEXT: vmv1r.v v15, v8
11923 ; CHECK-NEXT: vmv1r.v v16, v8
11924 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11925 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
11928 tail call void @llvm.riscv.vsuxseg7.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
11932 define void @test_vsuxseg7_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11933 ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i16:
11934 ; CHECK: # %bb.0: # %entry
11935 ; CHECK-NEXT: vmv1r.v v10, v8
11936 ; CHECK-NEXT: vmv1r.v v11, v8
11937 ; CHECK-NEXT: vmv1r.v v12, v8
11938 ; CHECK-NEXT: vmv1r.v v13, v8
11939 ; CHECK-NEXT: vmv1r.v v14, v8
11940 ; CHECK-NEXT: vmv1r.v v15, v8
11941 ; CHECK-NEXT: vmv1r.v v16, v8
11942 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11943 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
11946 tail call void @llvm.riscv.vsuxseg7.mask.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
11950 declare void @llvm.riscv.vsuxseg7.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, i32)
11951 declare void @llvm.riscv.vsuxseg7.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
11953 define void @test_vsuxseg7_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
11954 ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i8:
11955 ; CHECK: # %bb.0: # %entry
11956 ; CHECK-NEXT: vmv1r.v v10, v8
11957 ; CHECK-NEXT: vmv1r.v v11, v8
11958 ; CHECK-NEXT: vmv1r.v v12, v8
11959 ; CHECK-NEXT: vmv1r.v v13, v8
11960 ; CHECK-NEXT: vmv1r.v v14, v8
11961 ; CHECK-NEXT: vmv1r.v v15, v8
11962 ; CHECK-NEXT: vmv1r.v v16, v8
11963 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11964 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
11967 tail call void @llvm.riscv.vsuxseg7.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
11971 define void @test_vsuxseg7_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
11972 ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i8:
11973 ; CHECK: # %bb.0: # %entry
11974 ; CHECK-NEXT: vmv1r.v v10, v8
11975 ; CHECK-NEXT: vmv1r.v v11, v8
11976 ; CHECK-NEXT: vmv1r.v v12, v8
11977 ; CHECK-NEXT: vmv1r.v v13, v8
11978 ; CHECK-NEXT: vmv1r.v v14, v8
11979 ; CHECK-NEXT: vmv1r.v v15, v8
11980 ; CHECK-NEXT: vmv1r.v v16, v8
11981 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
11982 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
11985 tail call void @llvm.riscv.vsuxseg7.mask.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
11989 declare void @llvm.riscv.vsuxseg7.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, i32)
11990 declare void @llvm.riscv.vsuxseg7.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
11992 define void @test_vsuxseg7_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
11993 ; CHECK-LABEL: test_vsuxseg7_nxv4f16_nxv4i32:
11994 ; CHECK: # %bb.0: # %entry
11995 ; CHECK-NEXT: vmv1r.v v12, v8
11996 ; CHECK-NEXT: vmv1r.v v13, v8
11997 ; CHECK-NEXT: vmv1r.v v14, v8
11998 ; CHECK-NEXT: vmv1r.v v15, v8
11999 ; CHECK-NEXT: vmv1r.v v16, v8
12000 ; CHECK-NEXT: vmv1r.v v17, v8
12001 ; CHECK-NEXT: vmv1r.v v18, v8
12002 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12003 ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10
12006 tail call void @llvm.riscv.vsuxseg7.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
12010 define void @test_vsuxseg7_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12011 ; CHECK-LABEL: test_vsuxseg7_mask_nxv4f16_nxv4i32:
12012 ; CHECK: # %bb.0: # %entry
12013 ; CHECK-NEXT: vmv1r.v v12, v8
12014 ; CHECK-NEXT: vmv1r.v v13, v8
12015 ; CHECK-NEXT: vmv1r.v v14, v8
12016 ; CHECK-NEXT: vmv1r.v v15, v8
12017 ; CHECK-NEXT: vmv1r.v v16, v8
12018 ; CHECK-NEXT: vmv1r.v v17, v8
12019 ; CHECK-NEXT: vmv1r.v v18, v8
12020 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12021 ; CHECK-NEXT: vsuxseg7ei32.v v12, (a0), v10, v0.t
12024 tail call void @llvm.riscv.vsuxseg7.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
12028 declare void @llvm.riscv.vsuxseg8.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, i32)
12029 declare void @llvm.riscv.vsuxseg8.mask.nxv4f16.nxv4i16(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
12031 define void @test_vsuxseg8_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
12032 ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i16:
12033 ; CHECK: # %bb.0: # %entry
12034 ; CHECK-NEXT: vmv1r.v v10, v8
12035 ; CHECK-NEXT: vmv1r.v v11, v8
12036 ; CHECK-NEXT: vmv1r.v v12, v8
12037 ; CHECK-NEXT: vmv1r.v v13, v8
12038 ; CHECK-NEXT: vmv1r.v v14, v8
12039 ; CHECK-NEXT: vmv1r.v v15, v8
12040 ; CHECK-NEXT: vmv1r.v v16, v8
12041 ; CHECK-NEXT: vmv1r.v v17, v8
12042 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12043 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
12046 tail call void @llvm.riscv.vsuxseg8.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
12050 define void @test_vsuxseg8_mask_nxv4f16_nxv4i16(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12051 ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i16:
12052 ; CHECK: # %bb.0: # %entry
12053 ; CHECK-NEXT: vmv1r.v v10, v8
12054 ; CHECK-NEXT: vmv1r.v v11, v8
12055 ; CHECK-NEXT: vmv1r.v v12, v8
12056 ; CHECK-NEXT: vmv1r.v v13, v8
12057 ; CHECK-NEXT: vmv1r.v v14, v8
12058 ; CHECK-NEXT: vmv1r.v v15, v8
12059 ; CHECK-NEXT: vmv1r.v v16, v8
12060 ; CHECK-NEXT: vmv1r.v v17, v8
12061 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12062 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
12065 tail call void @llvm.riscv.vsuxseg8.mask.nxv4f16.nxv4i16(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
12069 declare void @llvm.riscv.vsuxseg8.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, i32)
12070 declare void @llvm.riscv.vsuxseg8.mask.nxv4f16.nxv4i8(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
12072 define void @test_vsuxseg8_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
12073 ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i8:
12074 ; CHECK: # %bb.0: # %entry
12075 ; CHECK-NEXT: vmv1r.v v10, v8
12076 ; CHECK-NEXT: vmv1r.v v11, v8
12077 ; CHECK-NEXT: vmv1r.v v12, v8
12078 ; CHECK-NEXT: vmv1r.v v13, v8
12079 ; CHECK-NEXT: vmv1r.v v14, v8
12080 ; CHECK-NEXT: vmv1r.v v15, v8
12081 ; CHECK-NEXT: vmv1r.v v16, v8
12082 ; CHECK-NEXT: vmv1r.v v17, v8
12083 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12084 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
12087 tail call void @llvm.riscv.vsuxseg8.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
12091 define void @test_vsuxseg8_mask_nxv4f16_nxv4i8(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12092 ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i8:
12093 ; CHECK: # %bb.0: # %entry
12094 ; CHECK-NEXT: vmv1r.v v10, v8
12095 ; CHECK-NEXT: vmv1r.v v11, v8
12096 ; CHECK-NEXT: vmv1r.v v12, v8
12097 ; CHECK-NEXT: vmv1r.v v13, v8
12098 ; CHECK-NEXT: vmv1r.v v14, v8
12099 ; CHECK-NEXT: vmv1r.v v15, v8
12100 ; CHECK-NEXT: vmv1r.v v16, v8
12101 ; CHECK-NEXT: vmv1r.v v17, v8
12102 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12103 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
12106 tail call void @llvm.riscv.vsuxseg8.mask.nxv4f16.nxv4i8(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
12110 declare void @llvm.riscv.vsuxseg8.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, i32)
12111 declare void @llvm.riscv.vsuxseg8.mask.nxv4f16.nxv4i32(<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>,<vscale x 4 x half>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
12113 define void @test_vsuxseg8_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
12114 ; CHECK-LABEL: test_vsuxseg8_nxv4f16_nxv4i32:
12115 ; CHECK: # %bb.0: # %entry
12116 ; CHECK-NEXT: vmv1r.v v12, v8
12117 ; CHECK-NEXT: vmv1r.v v13, v8
12118 ; CHECK-NEXT: vmv1r.v v14, v8
12119 ; CHECK-NEXT: vmv1r.v v15, v8
12120 ; CHECK-NEXT: vmv1r.v v16, v8
12121 ; CHECK-NEXT: vmv1r.v v17, v8
12122 ; CHECK-NEXT: vmv1r.v v18, v8
12123 ; CHECK-NEXT: vmv1r.v v19, v8
12124 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12125 ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10
12128 tail call void @llvm.riscv.vsuxseg8.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
12132 define void @test_vsuxseg8_mask_nxv4f16_nxv4i32(<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12133 ; CHECK-LABEL: test_vsuxseg8_mask_nxv4f16_nxv4i32:
12134 ; CHECK: # %bb.0: # %entry
12135 ; CHECK-NEXT: vmv1r.v v12, v8
12136 ; CHECK-NEXT: vmv1r.v v13, v8
12137 ; CHECK-NEXT: vmv1r.v v14, v8
12138 ; CHECK-NEXT: vmv1r.v v15, v8
12139 ; CHECK-NEXT: vmv1r.v v16, v8
12140 ; CHECK-NEXT: vmv1r.v v17, v8
12141 ; CHECK-NEXT: vmv1r.v v18, v8
12142 ; CHECK-NEXT: vmv1r.v v19, v8
12143 ; CHECK-NEXT: vsetvli zero, a1, e16, m1, ta, ma
12144 ; CHECK-NEXT: vsuxseg8ei32.v v12, (a0), v10, v0.t
12147 tail call void @llvm.riscv.vsuxseg8.mask.nxv4f16.nxv4i32(<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val,<vscale x 4 x half> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
12151 declare void @llvm.riscv.vsuxseg2.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, i32)
12152 declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
12154 define void @test_vsuxseg2_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
12155 ; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i32:
12156 ; CHECK: # %bb.0: # %entry
12157 ; CHECK-NEXT: vmv1r.v v10, v9
12158 ; CHECK-NEXT: vmv1r.v v9, v8
12159 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12160 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10
12163 tail call void @llvm.riscv.vsuxseg2.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
12167 define void @test_vsuxseg2_mask_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12168 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i32:
12169 ; CHECK: # %bb.0: # %entry
12170 ; CHECK-NEXT: vmv1r.v v10, v9
12171 ; CHECK-NEXT: vmv1r.v v9, v8
12172 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12173 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v10, v0.t
12176 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
12180 declare void @llvm.riscv.vsuxseg2.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, i32)
12181 declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
12183 define void @test_vsuxseg2_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
12184 ; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i8:
12185 ; CHECK: # %bb.0: # %entry
12186 ; CHECK-NEXT: vmv1r.v v10, v9
12187 ; CHECK-NEXT: vmv1r.v v9, v8
12188 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12189 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10
12192 tail call void @llvm.riscv.vsuxseg2.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
12196 define void @test_vsuxseg2_mask_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12197 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i8:
12198 ; CHECK: # %bb.0: # %entry
12199 ; CHECK-NEXT: vmv1r.v v10, v9
12200 ; CHECK-NEXT: vmv1r.v v9, v8
12201 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12202 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v10, v0.t
12205 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
12209 declare void @llvm.riscv.vsuxseg2.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, i32)
12210 declare void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
12212 define void @test_vsuxseg2_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
12213 ; CHECK-LABEL: test_vsuxseg2_nxv2f16_nxv2i16:
12214 ; CHECK: # %bb.0: # %entry
12215 ; CHECK-NEXT: vmv1r.v v10, v9
12216 ; CHECK-NEXT: vmv1r.v v9, v8
12217 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12218 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10
12221 tail call void @llvm.riscv.vsuxseg2.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
12225 define void @test_vsuxseg2_mask_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12226 ; CHECK-LABEL: test_vsuxseg2_mask_nxv2f16_nxv2i16:
12227 ; CHECK: # %bb.0: # %entry
12228 ; CHECK-NEXT: vmv1r.v v10, v9
12229 ; CHECK-NEXT: vmv1r.v v9, v8
12230 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12231 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v10, v0.t
12234 tail call void @llvm.riscv.vsuxseg2.mask.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
12238 declare void @llvm.riscv.vsuxseg3.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, i32)
12239 declare void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
12241 define void @test_vsuxseg3_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
12242 ; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i32:
12243 ; CHECK: # %bb.0: # %entry
12244 ; CHECK-NEXT: vmv1r.v v10, v8
12245 ; CHECK-NEXT: vmv1r.v v11, v8
12246 ; CHECK-NEXT: vmv1r.v v12, v8
12247 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12248 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9
12251 tail call void @llvm.riscv.vsuxseg3.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
12255 define void @test_vsuxseg3_mask_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12256 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i32:
12257 ; CHECK: # %bb.0: # %entry
12258 ; CHECK-NEXT: vmv1r.v v10, v8
12259 ; CHECK-NEXT: vmv1r.v v11, v8
12260 ; CHECK-NEXT: vmv1r.v v12, v8
12261 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12262 ; CHECK-NEXT: vsuxseg3ei32.v v10, (a0), v9, v0.t
12265 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
12269 declare void @llvm.riscv.vsuxseg3.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, i32)
12270 declare void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
12272 define void @test_vsuxseg3_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
12273 ; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i8:
12274 ; CHECK: # %bb.0: # %entry
12275 ; CHECK-NEXT: vmv1r.v v10, v8
12276 ; CHECK-NEXT: vmv1r.v v11, v8
12277 ; CHECK-NEXT: vmv1r.v v12, v8
12278 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12279 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9
12282 tail call void @llvm.riscv.vsuxseg3.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
12286 define void @test_vsuxseg3_mask_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12287 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i8:
12288 ; CHECK: # %bb.0: # %entry
12289 ; CHECK-NEXT: vmv1r.v v10, v8
12290 ; CHECK-NEXT: vmv1r.v v11, v8
12291 ; CHECK-NEXT: vmv1r.v v12, v8
12292 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12293 ; CHECK-NEXT: vsuxseg3ei8.v v10, (a0), v9, v0.t
12296 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
12300 declare void @llvm.riscv.vsuxseg3.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, i32)
12301 declare void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
12303 define void @test_vsuxseg3_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
12304 ; CHECK-LABEL: test_vsuxseg3_nxv2f16_nxv2i16:
12305 ; CHECK: # %bb.0: # %entry
12306 ; CHECK-NEXT: vmv1r.v v10, v8
12307 ; CHECK-NEXT: vmv1r.v v11, v8
12308 ; CHECK-NEXT: vmv1r.v v12, v8
12309 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12310 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9
12313 tail call void @llvm.riscv.vsuxseg3.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
12317 define void @test_vsuxseg3_mask_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12318 ; CHECK-LABEL: test_vsuxseg3_mask_nxv2f16_nxv2i16:
12319 ; CHECK: # %bb.0: # %entry
12320 ; CHECK-NEXT: vmv1r.v v10, v8
12321 ; CHECK-NEXT: vmv1r.v v11, v8
12322 ; CHECK-NEXT: vmv1r.v v12, v8
12323 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12324 ; CHECK-NEXT: vsuxseg3ei16.v v10, (a0), v9, v0.t
12327 tail call void @llvm.riscv.vsuxseg3.mask.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
12331 declare void @llvm.riscv.vsuxseg4.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, i32)
12332 declare void @llvm.riscv.vsuxseg4.mask.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
12334 define void @test_vsuxseg4_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
12335 ; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i32:
12336 ; CHECK: # %bb.0: # %entry
12337 ; CHECK-NEXT: vmv1r.v v10, v8
12338 ; CHECK-NEXT: vmv1r.v v11, v8
12339 ; CHECK-NEXT: vmv1r.v v12, v8
12340 ; CHECK-NEXT: vmv1r.v v13, v8
12341 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12342 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9
12345 tail call void @llvm.riscv.vsuxseg4.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
12349 define void @test_vsuxseg4_mask_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12350 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i32:
12351 ; CHECK: # %bb.0: # %entry
12352 ; CHECK-NEXT: vmv1r.v v10, v8
12353 ; CHECK-NEXT: vmv1r.v v11, v8
12354 ; CHECK-NEXT: vmv1r.v v12, v8
12355 ; CHECK-NEXT: vmv1r.v v13, v8
12356 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12357 ; CHECK-NEXT: vsuxseg4ei32.v v10, (a0), v9, v0.t
12360 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
12364 declare void @llvm.riscv.vsuxseg4.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, i32)
12365 declare void @llvm.riscv.vsuxseg4.mask.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
12367 define void @test_vsuxseg4_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
12368 ; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i8:
12369 ; CHECK: # %bb.0: # %entry
12370 ; CHECK-NEXT: vmv1r.v v10, v8
12371 ; CHECK-NEXT: vmv1r.v v11, v8
12372 ; CHECK-NEXT: vmv1r.v v12, v8
12373 ; CHECK-NEXT: vmv1r.v v13, v8
12374 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12375 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9
12378 tail call void @llvm.riscv.vsuxseg4.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
12382 define void @test_vsuxseg4_mask_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12383 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i8:
12384 ; CHECK: # %bb.0: # %entry
12385 ; CHECK-NEXT: vmv1r.v v10, v8
12386 ; CHECK-NEXT: vmv1r.v v11, v8
12387 ; CHECK-NEXT: vmv1r.v v12, v8
12388 ; CHECK-NEXT: vmv1r.v v13, v8
12389 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12390 ; CHECK-NEXT: vsuxseg4ei8.v v10, (a0), v9, v0.t
12393 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
12397 declare void @llvm.riscv.vsuxseg4.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, i32)
12398 declare void @llvm.riscv.vsuxseg4.mask.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
12400 define void @test_vsuxseg4_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
12401 ; CHECK-LABEL: test_vsuxseg4_nxv2f16_nxv2i16:
12402 ; CHECK: # %bb.0: # %entry
12403 ; CHECK-NEXT: vmv1r.v v10, v8
12404 ; CHECK-NEXT: vmv1r.v v11, v8
12405 ; CHECK-NEXT: vmv1r.v v12, v8
12406 ; CHECK-NEXT: vmv1r.v v13, v8
12407 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12408 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9
12411 tail call void @llvm.riscv.vsuxseg4.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
12415 define void @test_vsuxseg4_mask_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12416 ; CHECK-LABEL: test_vsuxseg4_mask_nxv2f16_nxv2i16:
12417 ; CHECK: # %bb.0: # %entry
12418 ; CHECK-NEXT: vmv1r.v v10, v8
12419 ; CHECK-NEXT: vmv1r.v v11, v8
12420 ; CHECK-NEXT: vmv1r.v v12, v8
12421 ; CHECK-NEXT: vmv1r.v v13, v8
12422 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12423 ; CHECK-NEXT: vsuxseg4ei16.v v10, (a0), v9, v0.t
12426 tail call void @llvm.riscv.vsuxseg4.mask.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
12430 declare void @llvm.riscv.vsuxseg5.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, i32)
12431 declare void @llvm.riscv.vsuxseg5.mask.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
12433 define void @test_vsuxseg5_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
12434 ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i32:
12435 ; CHECK: # %bb.0: # %entry
12436 ; CHECK-NEXT: vmv1r.v v10, v8
12437 ; CHECK-NEXT: vmv1r.v v11, v8
12438 ; CHECK-NEXT: vmv1r.v v12, v8
12439 ; CHECK-NEXT: vmv1r.v v13, v8
12440 ; CHECK-NEXT: vmv1r.v v14, v8
12441 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12442 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9
12445 tail call void @llvm.riscv.vsuxseg5.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
12449 define void @test_vsuxseg5_mask_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12450 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i32:
12451 ; CHECK: # %bb.0: # %entry
12452 ; CHECK-NEXT: vmv1r.v v10, v8
12453 ; CHECK-NEXT: vmv1r.v v11, v8
12454 ; CHECK-NEXT: vmv1r.v v12, v8
12455 ; CHECK-NEXT: vmv1r.v v13, v8
12456 ; CHECK-NEXT: vmv1r.v v14, v8
12457 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12458 ; CHECK-NEXT: vsuxseg5ei32.v v10, (a0), v9, v0.t
12461 tail call void @llvm.riscv.vsuxseg5.mask.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
12465 declare void @llvm.riscv.vsuxseg5.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, i32)
12466 declare void @llvm.riscv.vsuxseg5.mask.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
12468 define void @test_vsuxseg5_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
12469 ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i8:
12470 ; CHECK: # %bb.0: # %entry
12471 ; CHECK-NEXT: vmv1r.v v10, v8
12472 ; CHECK-NEXT: vmv1r.v v11, v8
12473 ; CHECK-NEXT: vmv1r.v v12, v8
12474 ; CHECK-NEXT: vmv1r.v v13, v8
12475 ; CHECK-NEXT: vmv1r.v v14, v8
12476 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12477 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9
12480 tail call void @llvm.riscv.vsuxseg5.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
12484 define void @test_vsuxseg5_mask_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12485 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i8:
12486 ; CHECK: # %bb.0: # %entry
12487 ; CHECK-NEXT: vmv1r.v v10, v8
12488 ; CHECK-NEXT: vmv1r.v v11, v8
12489 ; CHECK-NEXT: vmv1r.v v12, v8
12490 ; CHECK-NEXT: vmv1r.v v13, v8
12491 ; CHECK-NEXT: vmv1r.v v14, v8
12492 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12493 ; CHECK-NEXT: vsuxseg5ei8.v v10, (a0), v9, v0.t
12496 tail call void @llvm.riscv.vsuxseg5.mask.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
12500 declare void @llvm.riscv.vsuxseg5.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, i32)
12501 declare void @llvm.riscv.vsuxseg5.mask.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
12503 define void @test_vsuxseg5_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
12504 ; CHECK-LABEL: test_vsuxseg5_nxv2f16_nxv2i16:
12505 ; CHECK: # %bb.0: # %entry
12506 ; CHECK-NEXT: vmv1r.v v10, v8
12507 ; CHECK-NEXT: vmv1r.v v11, v8
12508 ; CHECK-NEXT: vmv1r.v v12, v8
12509 ; CHECK-NEXT: vmv1r.v v13, v8
12510 ; CHECK-NEXT: vmv1r.v v14, v8
12511 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12512 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9
12515 tail call void @llvm.riscv.vsuxseg5.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
12519 define void @test_vsuxseg5_mask_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12520 ; CHECK-LABEL: test_vsuxseg5_mask_nxv2f16_nxv2i16:
12521 ; CHECK: # %bb.0: # %entry
12522 ; CHECK-NEXT: vmv1r.v v10, v8
12523 ; CHECK-NEXT: vmv1r.v v11, v8
12524 ; CHECK-NEXT: vmv1r.v v12, v8
12525 ; CHECK-NEXT: vmv1r.v v13, v8
12526 ; CHECK-NEXT: vmv1r.v v14, v8
12527 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12528 ; CHECK-NEXT: vsuxseg5ei16.v v10, (a0), v9, v0.t
12531 tail call void @llvm.riscv.vsuxseg5.mask.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
12535 declare void @llvm.riscv.vsuxseg6.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, i32)
12536 declare void @llvm.riscv.vsuxseg6.mask.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
12538 define void @test_vsuxseg6_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
12539 ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i32:
12540 ; CHECK: # %bb.0: # %entry
12541 ; CHECK-NEXT: vmv1r.v v10, v8
12542 ; CHECK-NEXT: vmv1r.v v11, v8
12543 ; CHECK-NEXT: vmv1r.v v12, v8
12544 ; CHECK-NEXT: vmv1r.v v13, v8
12545 ; CHECK-NEXT: vmv1r.v v14, v8
12546 ; CHECK-NEXT: vmv1r.v v15, v8
12547 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12548 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9
12551 tail call void @llvm.riscv.vsuxseg6.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
12555 define void @test_vsuxseg6_mask_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12556 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i32:
12557 ; CHECK: # %bb.0: # %entry
12558 ; CHECK-NEXT: vmv1r.v v10, v8
12559 ; CHECK-NEXT: vmv1r.v v11, v8
12560 ; CHECK-NEXT: vmv1r.v v12, v8
12561 ; CHECK-NEXT: vmv1r.v v13, v8
12562 ; CHECK-NEXT: vmv1r.v v14, v8
12563 ; CHECK-NEXT: vmv1r.v v15, v8
12564 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12565 ; CHECK-NEXT: vsuxseg6ei32.v v10, (a0), v9, v0.t
12568 tail call void @llvm.riscv.vsuxseg6.mask.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
12572 declare void @llvm.riscv.vsuxseg6.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, i32)
12573 declare void @llvm.riscv.vsuxseg6.mask.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
12575 define void @test_vsuxseg6_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
12576 ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i8:
12577 ; CHECK: # %bb.0: # %entry
12578 ; CHECK-NEXT: vmv1r.v v10, v8
12579 ; CHECK-NEXT: vmv1r.v v11, v8
12580 ; CHECK-NEXT: vmv1r.v v12, v8
12581 ; CHECK-NEXT: vmv1r.v v13, v8
12582 ; CHECK-NEXT: vmv1r.v v14, v8
12583 ; CHECK-NEXT: vmv1r.v v15, v8
12584 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12585 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9
12588 tail call void @llvm.riscv.vsuxseg6.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
12592 define void @test_vsuxseg6_mask_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12593 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i8:
12594 ; CHECK: # %bb.0: # %entry
12595 ; CHECK-NEXT: vmv1r.v v10, v8
12596 ; CHECK-NEXT: vmv1r.v v11, v8
12597 ; CHECK-NEXT: vmv1r.v v12, v8
12598 ; CHECK-NEXT: vmv1r.v v13, v8
12599 ; CHECK-NEXT: vmv1r.v v14, v8
12600 ; CHECK-NEXT: vmv1r.v v15, v8
12601 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12602 ; CHECK-NEXT: vsuxseg6ei8.v v10, (a0), v9, v0.t
12605 tail call void @llvm.riscv.vsuxseg6.mask.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
12609 declare void @llvm.riscv.vsuxseg6.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, i32)
12610 declare void @llvm.riscv.vsuxseg6.mask.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
12612 define void @test_vsuxseg6_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
12613 ; CHECK-LABEL: test_vsuxseg6_nxv2f16_nxv2i16:
12614 ; CHECK: # %bb.0: # %entry
12615 ; CHECK-NEXT: vmv1r.v v10, v8
12616 ; CHECK-NEXT: vmv1r.v v11, v8
12617 ; CHECK-NEXT: vmv1r.v v12, v8
12618 ; CHECK-NEXT: vmv1r.v v13, v8
12619 ; CHECK-NEXT: vmv1r.v v14, v8
12620 ; CHECK-NEXT: vmv1r.v v15, v8
12621 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12622 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9
12625 tail call void @llvm.riscv.vsuxseg6.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
12629 define void @test_vsuxseg6_mask_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12630 ; CHECK-LABEL: test_vsuxseg6_mask_nxv2f16_nxv2i16:
12631 ; CHECK: # %bb.0: # %entry
12632 ; CHECK-NEXT: vmv1r.v v10, v8
12633 ; CHECK-NEXT: vmv1r.v v11, v8
12634 ; CHECK-NEXT: vmv1r.v v12, v8
12635 ; CHECK-NEXT: vmv1r.v v13, v8
12636 ; CHECK-NEXT: vmv1r.v v14, v8
12637 ; CHECK-NEXT: vmv1r.v v15, v8
12638 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12639 ; CHECK-NEXT: vsuxseg6ei16.v v10, (a0), v9, v0.t
12642 tail call void @llvm.riscv.vsuxseg6.mask.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
12646 declare void @llvm.riscv.vsuxseg7.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, i32)
12647 declare void @llvm.riscv.vsuxseg7.mask.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
12649 define void @test_vsuxseg7_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
12650 ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i32:
12651 ; CHECK: # %bb.0: # %entry
12652 ; CHECK-NEXT: vmv1r.v v10, v8
12653 ; CHECK-NEXT: vmv1r.v v11, v8
12654 ; CHECK-NEXT: vmv1r.v v12, v8
12655 ; CHECK-NEXT: vmv1r.v v13, v8
12656 ; CHECK-NEXT: vmv1r.v v14, v8
12657 ; CHECK-NEXT: vmv1r.v v15, v8
12658 ; CHECK-NEXT: vmv1r.v v16, v8
12659 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12660 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9
12663 tail call void @llvm.riscv.vsuxseg7.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
12667 define void @test_vsuxseg7_mask_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12668 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i32:
12669 ; CHECK: # %bb.0: # %entry
12670 ; CHECK-NEXT: vmv1r.v v10, v8
12671 ; CHECK-NEXT: vmv1r.v v11, v8
12672 ; CHECK-NEXT: vmv1r.v v12, v8
12673 ; CHECK-NEXT: vmv1r.v v13, v8
12674 ; CHECK-NEXT: vmv1r.v v14, v8
12675 ; CHECK-NEXT: vmv1r.v v15, v8
12676 ; CHECK-NEXT: vmv1r.v v16, v8
12677 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12678 ; CHECK-NEXT: vsuxseg7ei32.v v10, (a0), v9, v0.t
12681 tail call void @llvm.riscv.vsuxseg7.mask.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
12685 declare void @llvm.riscv.vsuxseg7.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, i32)
12686 declare void @llvm.riscv.vsuxseg7.mask.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
12688 define void @test_vsuxseg7_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
12689 ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i8:
12690 ; CHECK: # %bb.0: # %entry
12691 ; CHECK-NEXT: vmv1r.v v10, v8
12692 ; CHECK-NEXT: vmv1r.v v11, v8
12693 ; CHECK-NEXT: vmv1r.v v12, v8
12694 ; CHECK-NEXT: vmv1r.v v13, v8
12695 ; CHECK-NEXT: vmv1r.v v14, v8
12696 ; CHECK-NEXT: vmv1r.v v15, v8
12697 ; CHECK-NEXT: vmv1r.v v16, v8
12698 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12699 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9
12702 tail call void @llvm.riscv.vsuxseg7.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
12706 define void @test_vsuxseg7_mask_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12707 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i8:
12708 ; CHECK: # %bb.0: # %entry
12709 ; CHECK-NEXT: vmv1r.v v10, v8
12710 ; CHECK-NEXT: vmv1r.v v11, v8
12711 ; CHECK-NEXT: vmv1r.v v12, v8
12712 ; CHECK-NEXT: vmv1r.v v13, v8
12713 ; CHECK-NEXT: vmv1r.v v14, v8
12714 ; CHECK-NEXT: vmv1r.v v15, v8
12715 ; CHECK-NEXT: vmv1r.v v16, v8
12716 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12717 ; CHECK-NEXT: vsuxseg7ei8.v v10, (a0), v9, v0.t
12720 tail call void @llvm.riscv.vsuxseg7.mask.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
12724 declare void @llvm.riscv.vsuxseg7.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, i32)
12725 declare void @llvm.riscv.vsuxseg7.mask.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
12727 define void @test_vsuxseg7_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
12728 ; CHECK-LABEL: test_vsuxseg7_nxv2f16_nxv2i16:
12729 ; CHECK: # %bb.0: # %entry
12730 ; CHECK-NEXT: vmv1r.v v10, v8
12731 ; CHECK-NEXT: vmv1r.v v11, v8
12732 ; CHECK-NEXT: vmv1r.v v12, v8
12733 ; CHECK-NEXT: vmv1r.v v13, v8
12734 ; CHECK-NEXT: vmv1r.v v14, v8
12735 ; CHECK-NEXT: vmv1r.v v15, v8
12736 ; CHECK-NEXT: vmv1r.v v16, v8
12737 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12738 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9
12741 tail call void @llvm.riscv.vsuxseg7.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
12745 define void @test_vsuxseg7_mask_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12746 ; CHECK-LABEL: test_vsuxseg7_mask_nxv2f16_nxv2i16:
12747 ; CHECK: # %bb.0: # %entry
12748 ; CHECK-NEXT: vmv1r.v v10, v8
12749 ; CHECK-NEXT: vmv1r.v v11, v8
12750 ; CHECK-NEXT: vmv1r.v v12, v8
12751 ; CHECK-NEXT: vmv1r.v v13, v8
12752 ; CHECK-NEXT: vmv1r.v v14, v8
12753 ; CHECK-NEXT: vmv1r.v v15, v8
12754 ; CHECK-NEXT: vmv1r.v v16, v8
12755 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12756 ; CHECK-NEXT: vsuxseg7ei16.v v10, (a0), v9, v0.t
12759 tail call void @llvm.riscv.vsuxseg7.mask.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
12763 declare void @llvm.riscv.vsuxseg8.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, i32)
12764 declare void @llvm.riscv.vsuxseg8.mask.nxv2f16.nxv2i32(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i32>, <vscale x 2 x i1>, i32)
12766 define void @test_vsuxseg8_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl) {
12767 ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i32:
12768 ; CHECK: # %bb.0: # %entry
12769 ; CHECK-NEXT: vmv1r.v v10, v8
12770 ; CHECK-NEXT: vmv1r.v v11, v8
12771 ; CHECK-NEXT: vmv1r.v v12, v8
12772 ; CHECK-NEXT: vmv1r.v v13, v8
12773 ; CHECK-NEXT: vmv1r.v v14, v8
12774 ; CHECK-NEXT: vmv1r.v v15, v8
12775 ; CHECK-NEXT: vmv1r.v v16, v8
12776 ; CHECK-NEXT: vmv1r.v v17, v8
12777 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12778 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9
12781 tail call void @llvm.riscv.vsuxseg8.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, i32 %vl)
12785 define void @test_vsuxseg8_mask_nxv2f16_nxv2i32(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12786 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i32:
12787 ; CHECK: # %bb.0: # %entry
12788 ; CHECK-NEXT: vmv1r.v v10, v8
12789 ; CHECK-NEXT: vmv1r.v v11, v8
12790 ; CHECK-NEXT: vmv1r.v v12, v8
12791 ; CHECK-NEXT: vmv1r.v v13, v8
12792 ; CHECK-NEXT: vmv1r.v v14, v8
12793 ; CHECK-NEXT: vmv1r.v v15, v8
12794 ; CHECK-NEXT: vmv1r.v v16, v8
12795 ; CHECK-NEXT: vmv1r.v v17, v8
12796 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12797 ; CHECK-NEXT: vsuxseg8ei32.v v10, (a0), v9, v0.t
12800 tail call void @llvm.riscv.vsuxseg8.mask.nxv2f16.nxv2i32(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i32> %index, <vscale x 2 x i1> %mask, i32 %vl)
12804 declare void @llvm.riscv.vsuxseg8.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, i32)
12805 declare void @llvm.riscv.vsuxseg8.mask.nxv2f16.nxv2i8(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i8>, <vscale x 2 x i1>, i32)
12807 define void @test_vsuxseg8_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl) {
12808 ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i8:
12809 ; CHECK: # %bb.0: # %entry
12810 ; CHECK-NEXT: vmv1r.v v10, v8
12811 ; CHECK-NEXT: vmv1r.v v11, v8
12812 ; CHECK-NEXT: vmv1r.v v12, v8
12813 ; CHECK-NEXT: vmv1r.v v13, v8
12814 ; CHECK-NEXT: vmv1r.v v14, v8
12815 ; CHECK-NEXT: vmv1r.v v15, v8
12816 ; CHECK-NEXT: vmv1r.v v16, v8
12817 ; CHECK-NEXT: vmv1r.v v17, v8
12818 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12819 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9
12822 tail call void @llvm.riscv.vsuxseg8.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, i32 %vl)
12826 define void @test_vsuxseg8_mask_nxv2f16_nxv2i8(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12827 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i8:
12828 ; CHECK: # %bb.0: # %entry
12829 ; CHECK-NEXT: vmv1r.v v10, v8
12830 ; CHECK-NEXT: vmv1r.v v11, v8
12831 ; CHECK-NEXT: vmv1r.v v12, v8
12832 ; CHECK-NEXT: vmv1r.v v13, v8
12833 ; CHECK-NEXT: vmv1r.v v14, v8
12834 ; CHECK-NEXT: vmv1r.v v15, v8
12835 ; CHECK-NEXT: vmv1r.v v16, v8
12836 ; CHECK-NEXT: vmv1r.v v17, v8
12837 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12838 ; CHECK-NEXT: vsuxseg8ei8.v v10, (a0), v9, v0.t
12841 tail call void @llvm.riscv.vsuxseg8.mask.nxv2f16.nxv2i8(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i8> %index, <vscale x 2 x i1> %mask, i32 %vl)
12845 declare void @llvm.riscv.vsuxseg8.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, i32)
12846 declare void @llvm.riscv.vsuxseg8.mask.nxv2f16.nxv2i16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, ptr, <vscale x 2 x i16>, <vscale x 2 x i1>, i32)
12848 define void @test_vsuxseg8_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl) {
12849 ; CHECK-LABEL: test_vsuxseg8_nxv2f16_nxv2i16:
12850 ; CHECK: # %bb.0: # %entry
12851 ; CHECK-NEXT: vmv1r.v v10, v8
12852 ; CHECK-NEXT: vmv1r.v v11, v8
12853 ; CHECK-NEXT: vmv1r.v v12, v8
12854 ; CHECK-NEXT: vmv1r.v v13, v8
12855 ; CHECK-NEXT: vmv1r.v v14, v8
12856 ; CHECK-NEXT: vmv1r.v v15, v8
12857 ; CHECK-NEXT: vmv1r.v v16, v8
12858 ; CHECK-NEXT: vmv1r.v v17, v8
12859 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12860 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9
12863 tail call void @llvm.riscv.vsuxseg8.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, i32 %vl)
12867 define void @test_vsuxseg8_mask_nxv2f16_nxv2i16(<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl) {
12868 ; CHECK-LABEL: test_vsuxseg8_mask_nxv2f16_nxv2i16:
12869 ; CHECK: # %bb.0: # %entry
12870 ; CHECK-NEXT: vmv1r.v v10, v8
12871 ; CHECK-NEXT: vmv1r.v v11, v8
12872 ; CHECK-NEXT: vmv1r.v v12, v8
12873 ; CHECK-NEXT: vmv1r.v v13, v8
12874 ; CHECK-NEXT: vmv1r.v v14, v8
12875 ; CHECK-NEXT: vmv1r.v v15, v8
12876 ; CHECK-NEXT: vmv1r.v v16, v8
12877 ; CHECK-NEXT: vmv1r.v v17, v8
12878 ; CHECK-NEXT: vsetvli zero, a1, e16, mf2, ta, ma
12879 ; CHECK-NEXT: vsuxseg8ei16.v v10, (a0), v9, v0.t
12882 tail call void @llvm.riscv.vsuxseg8.mask.nxv2f16.nxv2i16(<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val,<vscale x 2 x half> %val, ptr %base, <vscale x 2 x i16> %index, <vscale x 2 x i1> %mask, i32 %vl)
12886 declare void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i16(<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i16>, i32)
12887 declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i16(<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
12889 define void @test_vsuxseg2_nxv4f32_nxv4i16(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
12890 ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i16:
12891 ; CHECK: # %bb.0: # %entry
12892 ; CHECK-NEXT: vmv1r.v v12, v10
12893 ; CHECK-NEXT: vmv2r.v v10, v8
12894 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12895 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
12898 tail call void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i16(<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
12902 define void @test_vsuxseg2_mask_nxv4f32_nxv4i16(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12903 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i16:
12904 ; CHECK: # %bb.0: # %entry
12905 ; CHECK-NEXT: vmv1r.v v12, v10
12906 ; CHECK-NEXT: vmv2r.v v10, v8
12907 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12908 ; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
12911 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i16(<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
12915 declare void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i8(<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i8>, i32)
12916 declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i8(<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
12918 define void @test_vsuxseg2_nxv4f32_nxv4i8(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
12919 ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i8:
12920 ; CHECK: # %bb.0: # %entry
12921 ; CHECK-NEXT: vmv1r.v v12, v10
12922 ; CHECK-NEXT: vmv2r.v v10, v8
12923 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12924 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12
12927 tail call void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i8(<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
12931 define void @test_vsuxseg2_mask_nxv4f32_nxv4i8(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12932 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i8:
12933 ; CHECK: # %bb.0: # %entry
12934 ; CHECK-NEXT: vmv1r.v v12, v10
12935 ; CHECK-NEXT: vmv2r.v v10, v8
12936 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12937 ; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v12, v0.t
12940 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i8(<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
12944 declare void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i32(<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i32>, i32)
12945 declare void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i32(<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
12947 define void @test_vsuxseg2_nxv4f32_nxv4i32(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
12948 ; CHECK-LABEL: test_vsuxseg2_nxv4f32_nxv4i32:
12949 ; CHECK: # %bb.0: # %entry
12950 ; CHECK-NEXT: vmv2r.v v12, v10
12951 ; CHECK-NEXT: vmv2r.v v10, v8
12952 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12953 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12
12956 tail call void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i32(<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
12960 define void @test_vsuxseg2_mask_nxv4f32_nxv4i32(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12961 ; CHECK-LABEL: test_vsuxseg2_mask_nxv4f32_nxv4i32:
12962 ; CHECK: # %bb.0: # %entry
12963 ; CHECK-NEXT: vmv2r.v v12, v10
12964 ; CHECK-NEXT: vmv2r.v v10, v8
12965 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12966 ; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v12, v0.t
12969 tail call void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i32(<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
12973 declare void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i16(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i16>, i32)
12974 declare void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i16(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
12976 define void @test_vsuxseg3_nxv4f32_nxv4i16(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
12977 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i16:
12978 ; CHECK: # %bb.0: # %entry
12979 ; CHECK-NEXT: vmv2r.v v12, v8
12980 ; CHECK-NEXT: vmv2r.v v14, v8
12981 ; CHECK-NEXT: vmv2r.v v16, v8
12982 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12983 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10
12986 tail call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i16(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
12990 define void @test_vsuxseg3_mask_nxv4f32_nxv4i16(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
12991 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i16:
12992 ; CHECK: # %bb.0: # %entry
12993 ; CHECK-NEXT: vmv2r.v v12, v8
12994 ; CHECK-NEXT: vmv2r.v v14, v8
12995 ; CHECK-NEXT: vmv2r.v v16, v8
12996 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
12997 ; CHECK-NEXT: vsuxseg3ei16.v v12, (a0), v10, v0.t
13000 tail call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i16(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
13004 declare void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i8(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i8>, i32)
13005 declare void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i8(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
13007 define void @test_vsuxseg3_nxv4f32_nxv4i8(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
13008 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i8:
13009 ; CHECK: # %bb.0: # %entry
13010 ; CHECK-NEXT: vmv2r.v v12, v8
13011 ; CHECK-NEXT: vmv2r.v v14, v8
13012 ; CHECK-NEXT: vmv2r.v v16, v8
13013 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13014 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10
13017 tail call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i8(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
13021 define void @test_vsuxseg3_mask_nxv4f32_nxv4i8(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
13022 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i8:
13023 ; CHECK: # %bb.0: # %entry
13024 ; CHECK-NEXT: vmv2r.v v12, v8
13025 ; CHECK-NEXT: vmv2r.v v14, v8
13026 ; CHECK-NEXT: vmv2r.v v16, v8
13027 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13028 ; CHECK-NEXT: vsuxseg3ei8.v v12, (a0), v10, v0.t
13031 tail call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i8(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
13035 declare void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i32>, i32)
13036 declare void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
13038 define void @test_vsuxseg3_nxv4f32_nxv4i32(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
13039 ; CHECK-LABEL: test_vsuxseg3_nxv4f32_nxv4i32:
13040 ; CHECK: # %bb.0: # %entry
13041 ; CHECK-NEXT: vmv2r.v v12, v8
13042 ; CHECK-NEXT: vmv2r.v v14, v8
13043 ; CHECK-NEXT: vmv2r.v v16, v8
13044 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13045 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10
13048 tail call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i32(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
13052 define void @test_vsuxseg3_mask_nxv4f32_nxv4i32(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
13053 ; CHECK-LABEL: test_vsuxseg3_mask_nxv4f32_nxv4i32:
13054 ; CHECK: # %bb.0: # %entry
13055 ; CHECK-NEXT: vmv2r.v v12, v8
13056 ; CHECK-NEXT: vmv2r.v v14, v8
13057 ; CHECK-NEXT: vmv2r.v v16, v8
13058 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13059 ; CHECK-NEXT: vsuxseg3ei32.v v12, (a0), v10, v0.t
13062 tail call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i32(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)
13066 declare void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i16(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i16>, i32)
13067 declare void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i16(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i16>, <vscale x 4 x i1>, i32)
13069 define void @test_vsuxseg4_nxv4f32_nxv4i16(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl) {
13070 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i16:
13071 ; CHECK: # %bb.0: # %entry
13072 ; CHECK-NEXT: vmv2r.v v12, v8
13073 ; CHECK-NEXT: vmv2r.v v14, v8
13074 ; CHECK-NEXT: vmv2r.v v16, v8
13075 ; CHECK-NEXT: vmv2r.v v18, v8
13076 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13077 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10
13080 tail call void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i16(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, i32 %vl)
13084 define void @test_vsuxseg4_mask_nxv4f32_nxv4i16(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl) {
13085 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i16:
13086 ; CHECK: # %bb.0: # %entry
13087 ; CHECK-NEXT: vmv2r.v v12, v8
13088 ; CHECK-NEXT: vmv2r.v v14, v8
13089 ; CHECK-NEXT: vmv2r.v v16, v8
13090 ; CHECK-NEXT: vmv2r.v v18, v8
13091 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13092 ; CHECK-NEXT: vsuxseg4ei16.v v12, (a0), v10, v0.t
13095 tail call void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i16(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i32 %vl)
13099 declare void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i8(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i8>, i32)
13100 declare void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i8(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i8>, <vscale x 4 x i1>, i32)
13102 define void @test_vsuxseg4_nxv4f32_nxv4i8(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl) {
13103 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i8:
13104 ; CHECK: # %bb.0: # %entry
13105 ; CHECK-NEXT: vmv2r.v v12, v8
13106 ; CHECK-NEXT: vmv2r.v v14, v8
13107 ; CHECK-NEXT: vmv2r.v v16, v8
13108 ; CHECK-NEXT: vmv2r.v v18, v8
13109 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13110 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10
13113 tail call void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i8(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, i32 %vl)
13117 define void @test_vsuxseg4_mask_nxv4f32_nxv4i8(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl) {
13118 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i8:
13119 ; CHECK: # %bb.0: # %entry
13120 ; CHECK-NEXT: vmv2r.v v12, v8
13121 ; CHECK-NEXT: vmv2r.v v14, v8
13122 ; CHECK-NEXT: vmv2r.v v16, v8
13123 ; CHECK-NEXT: vmv2r.v v18, v8
13124 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13125 ; CHECK-NEXT: vsuxseg4ei8.v v12, (a0), v10, v0.t
13128 tail call void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i8(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i32 %vl)
13132 declare void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i32>, i32)
13133 declare void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, ptr, <vscale x 4 x i32>, <vscale x 4 x i1>, i32)
13135 define void @test_vsuxseg4_nxv4f32_nxv4i32(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl) {
13136 ; CHECK-LABEL: test_vsuxseg4_nxv4f32_nxv4i32:
13137 ; CHECK: # %bb.0: # %entry
13138 ; CHECK-NEXT: vmv2r.v v12, v8
13139 ; CHECK-NEXT: vmv2r.v v14, v8
13140 ; CHECK-NEXT: vmv2r.v v16, v8
13141 ; CHECK-NEXT: vmv2r.v v18, v8
13142 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13143 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10
13146 tail call void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i32(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, i32 %vl)
13150 define void @test_vsuxseg4_mask_nxv4f32_nxv4i32(<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl) {
13151 ; CHECK-LABEL: test_vsuxseg4_mask_nxv4f32_nxv4i32:
13152 ; CHECK: # %bb.0: # %entry
13153 ; CHECK-NEXT: vmv2r.v v12, v8
13154 ; CHECK-NEXT: vmv2r.v v14, v8
13155 ; CHECK-NEXT: vmv2r.v v16, v8
13156 ; CHECK-NEXT: vmv2r.v v18, v8
13157 ; CHECK-NEXT: vsetvli zero, a1, e32, m2, ta, ma
13158 ; CHECK-NEXT: vsuxseg4ei32.v v12, (a0), v10, v0.t
13161 tail call void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i32(<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val,<vscale x 4 x float> %val, ptr %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i32 %vl)