; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+d,+zfh,+zvfh,+v -target-abi=ilp32d \
; RUN:   -verify-machineinstrs < %s | FileCheck %s
; RUN: llc -mtriple=riscv64 -mattr=+d,+zfh,+zvfh,+v -target-abi=lp64d \
; RUN:   -verify-machineinstrs < %s | FileCheck %s
7 declare <vscale x 1 x i1> @llvm.experimental.constrained.fptosi.nxv1i1.nxv1f16(<vscale x 1 x half>, metadata)
8 define <vscale x 1 x i1> @vfptosi_nxv1f16_nxv1i1(<vscale x 1 x half> %va) strictfp {
9 ; CHECK-LABEL: vfptosi_nxv1f16_nxv1i1:
11 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
12 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
13 ; CHECK-NEXT: vand.vi v8, v9, 1
14 ; CHECK-NEXT: vmsne.vi v0, v8, 0
16 %evec = call <vscale x 1 x i1> @llvm.experimental.constrained.fptosi.nxv1i1.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
17 ret <vscale x 1 x i1> %evec
20 declare <vscale x 1 x i1> @llvm.experimental.constrained.fptoui.nxv1i1.nxv1f16(<vscale x 1 x half>, metadata)
21 define <vscale x 1 x i1> @vfptoui_nxv1f16_nxv1i1(<vscale x 1 x half> %va) strictfp {
22 ; CHECK-LABEL: vfptoui_nxv1f16_nxv1i1:
24 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
25 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
26 ; CHECK-NEXT: vand.vi v8, v9, 1
27 ; CHECK-NEXT: vmsne.vi v0, v8, 0
29 %evec = call <vscale x 1 x i1> @llvm.experimental.constrained.fptoui.nxv1i1.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
30 ret <vscale x 1 x i1> %evec
33 declare <vscale x 1 x i7> @llvm.experimental.constrained.fptosi.nxv1i7.nxv1f16(<vscale x 1 x half>, metadata)
34 define <vscale x 1 x i7> @vfptosi_nxv1f16_nxv1i7(<vscale x 1 x half> %va) strictfp {
35 ; CHECK-LABEL: vfptosi_nxv1f16_nxv1i7:
37 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
38 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
39 ; CHECK-NEXT: vmv1r.v v8, v9
41 %evec = call <vscale x 1 x i7> @llvm.experimental.constrained.fptosi.nxv1i7.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
42 ret <vscale x 1 x i7> %evec
45 declare <vscale x 1 x i7> @llvm.experimental.constrained.fptoui.nxv1i7.nxv1f16(<vscale x 1 x half>, metadata)
46 define <vscale x 1 x i7> @vfptoui_nxv1f16_nxv1i7(<vscale x 1 x half> %va) strictfp {
47 ; CHECK-LABEL: vfptoui_nxv1f16_nxv1i7:
49 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
50 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
51 ; CHECK-NEXT: vmv1r.v v8, v9
53 %evec = call <vscale x 1 x i7> @llvm.experimental.constrained.fptoui.nxv1i7.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
54 ret <vscale x 1 x i7> %evec
57 declare <vscale x 1 x i8> @llvm.experimental.constrained.fptosi.nxv1i8.nxv1f16(<vscale x 1 x half>, metadata)
58 define <vscale x 1 x i8> @vfptosi_nxv1f16_nxv1i8(<vscale x 1 x half> %va) strictfp {
59 ; CHECK-LABEL: vfptosi_nxv1f16_nxv1i8:
61 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
62 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
63 ; CHECK-NEXT: vmv1r.v v8, v9
65 %evec = call <vscale x 1 x i8> @llvm.experimental.constrained.fptosi.nxv1i8.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
66 ret <vscale x 1 x i8> %evec
69 declare <vscale x 1 x i8> @llvm.experimental.constrained.fptoui.nxv1i8.nxv1f16(<vscale x 1 x half>, metadata)
70 define <vscale x 1 x i8> @vfptoui_nxv1f16_nxv1i8(<vscale x 1 x half> %va) strictfp {
71 ; CHECK-LABEL: vfptoui_nxv1f16_nxv1i8:
73 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
74 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
75 ; CHECK-NEXT: vmv1r.v v8, v9
77 %evec = call <vscale x 1 x i8> @llvm.experimental.constrained.fptoui.nxv1i8.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
78 ret <vscale x 1 x i8> %evec
81 declare <vscale x 1 x i16> @llvm.experimental.constrained.fptosi.nxv1i16.nxv1f16(<vscale x 1 x half>, metadata)
82 define <vscale x 1 x i16> @vfptosi_nxv1f16_nxv1i16(<vscale x 1 x half> %va) strictfp {
83 ; CHECK-LABEL: vfptosi_nxv1f16_nxv1i16:
85 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
86 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
88 %evec = call <vscale x 1 x i16> @llvm.experimental.constrained.fptosi.nxv1i16.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
89 ret <vscale x 1 x i16> %evec
92 declare <vscale x 1 x i16> @llvm.experimental.constrained.fptoui.nxv1i16.nxv1f16(<vscale x 1 x half>, metadata)
93 define <vscale x 1 x i16> @vfptoui_nxv1f16_nxv1i16(<vscale x 1 x half> %va) strictfp {
94 ; CHECK-LABEL: vfptoui_nxv1f16_nxv1i16:
96 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
97 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
99 %evec = call <vscale x 1 x i16> @llvm.experimental.constrained.fptoui.nxv1i16.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
100 ret <vscale x 1 x i16> %evec
103 declare <vscale x 1 x i32> @llvm.experimental.constrained.fptosi.nxv1i32.nxv1f16(<vscale x 1 x half>, metadata)
104 define <vscale x 1 x i32> @vfptosi_nxv1f16_nxv1i32(<vscale x 1 x half> %va) strictfp {
105 ; CHECK-LABEL: vfptosi_nxv1f16_nxv1i32:
107 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
108 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v9, v8
109 ; CHECK-NEXT: vmv1r.v v8, v9
111 %evec = call <vscale x 1 x i32> @llvm.experimental.constrained.fptosi.nxv1i32.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
112 ret <vscale x 1 x i32> %evec
115 declare <vscale x 1 x i32> @llvm.experimental.constrained.fptoui.nxv1i32.nxv1f16(<vscale x 1 x half>, metadata)
116 define <vscale x 1 x i32> @vfptoui_nxv1f16_nxv1i32(<vscale x 1 x half> %va) strictfp {
117 ; CHECK-LABEL: vfptoui_nxv1f16_nxv1i32:
119 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
120 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v9, v8
121 ; CHECK-NEXT: vmv1r.v v8, v9
123 %evec = call <vscale x 1 x i32> @llvm.experimental.constrained.fptoui.nxv1i32.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
124 ret <vscale x 1 x i32> %evec
127 declare <vscale x 1 x i64> @llvm.experimental.constrained.fptosi.nxv1i64.nxv1f16(<vscale x 1 x half>, metadata)
128 define <vscale x 1 x i64> @vfptosi_nxv1f16_nxv1i64(<vscale x 1 x half> %va) strictfp {
129 ; CHECK-LABEL: vfptosi_nxv1f16_nxv1i64:
131 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
132 ; CHECK-NEXT: vfwcvt.f.f.v v9, v8
133 ; CHECK-NEXT: vsetvli zero, zero, e32, mf2, ta, ma
134 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v8, v9
136 %evec = call <vscale x 1 x i64> @llvm.experimental.constrained.fptosi.nxv1i64.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
137 ret <vscale x 1 x i64> %evec
140 declare <vscale x 1 x i64> @llvm.experimental.constrained.fptoui.nxv1i64.nxv1f16(<vscale x 1 x half>, metadata)
141 define <vscale x 1 x i64> @vfptoui_nxv1f16_nxv1i64(<vscale x 1 x half> %va) strictfp {
142 ; CHECK-LABEL: vfptoui_nxv1f16_nxv1i64:
144 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
145 ; CHECK-NEXT: vfwcvt.f.f.v v9, v8
146 ; CHECK-NEXT: vsetvli zero, zero, e32, mf2, ta, ma
147 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v8, v9
149 %evec = call <vscale x 1 x i64> @llvm.experimental.constrained.fptoui.nxv1i64.nxv1f16(<vscale x 1 x half> %va, metadata !"fpexcept.strict")
150 ret <vscale x 1 x i64> %evec
153 declare <vscale x 2 x i1> @llvm.experimental.constrained.fptosi.nxv2i1.nxv2f16(<vscale x 2 x half>, metadata)
154 define <vscale x 2 x i1> @vfptosi_nxv2f16_nxv2i1(<vscale x 2 x half> %va) strictfp {
155 ; CHECK-LABEL: vfptosi_nxv2f16_nxv2i1:
157 ; CHECK-NEXT: vsetvli a0, zero, e8, mf4, ta, ma
158 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
159 ; CHECK-NEXT: vand.vi v8, v9, 1
160 ; CHECK-NEXT: vmsne.vi v0, v8, 0
162 %evec = call <vscale x 2 x i1> @llvm.experimental.constrained.fptosi.nxv2i1.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
163 ret <vscale x 2 x i1> %evec
166 declare <vscale x 2 x i1> @llvm.experimental.constrained.fptoui.nxv2i1.nxv2f16(<vscale x 2 x half>, metadata)
167 define <vscale x 2 x i1> @vfptoui_nxv2f16_nxv2i1(<vscale x 2 x half> %va) strictfp {
168 ; CHECK-LABEL: vfptoui_nxv2f16_nxv2i1:
170 ; CHECK-NEXT: vsetvli a0, zero, e8, mf4, ta, ma
171 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
172 ; CHECK-NEXT: vand.vi v8, v9, 1
173 ; CHECK-NEXT: vmsne.vi v0, v8, 0
175 %evec = call <vscale x 2 x i1> @llvm.experimental.constrained.fptoui.nxv2i1.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
176 ret <vscale x 2 x i1> %evec
179 declare <vscale x 2 x i8> @llvm.experimental.constrained.fptosi.nxv2i8.nxv2f16(<vscale x 2 x half>, metadata)
180 define <vscale x 2 x i8> @vfptosi_nxv2f16_nxv2i8(<vscale x 2 x half> %va) strictfp {
181 ; CHECK-LABEL: vfptosi_nxv2f16_nxv2i8:
183 ; CHECK-NEXT: vsetvli a0, zero, e8, mf4, ta, ma
184 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
185 ; CHECK-NEXT: vmv1r.v v8, v9
187 %evec = call <vscale x 2 x i8> @llvm.experimental.constrained.fptosi.nxv2i8.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
188 ret <vscale x 2 x i8> %evec
191 declare <vscale x 2 x i8> @llvm.experimental.constrained.fptoui.nxv2i8.nxv2f16(<vscale x 2 x half>, metadata)
192 define <vscale x 2 x i8> @vfptoui_nxv2f16_nxv2i8(<vscale x 2 x half> %va) strictfp {
193 ; CHECK-LABEL: vfptoui_nxv2f16_nxv2i8:
195 ; CHECK-NEXT: vsetvli a0, zero, e8, mf4, ta, ma
196 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
197 ; CHECK-NEXT: vmv1r.v v8, v9
199 %evec = call <vscale x 2 x i8> @llvm.experimental.constrained.fptoui.nxv2i8.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
200 ret <vscale x 2 x i8> %evec
203 declare <vscale x 2 x i16> @llvm.experimental.constrained.fptosi.nxv2i16.nxv2f16(<vscale x 2 x half>, metadata)
204 define <vscale x 2 x i16> @vfptosi_nxv2f16_nxv2i16(<vscale x 2 x half> %va) strictfp {
205 ; CHECK-LABEL: vfptosi_nxv2f16_nxv2i16:
207 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
208 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
210 %evec = call <vscale x 2 x i16> @llvm.experimental.constrained.fptosi.nxv2i16.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
211 ret <vscale x 2 x i16> %evec
214 declare <vscale x 2 x i16> @llvm.experimental.constrained.fptoui.nxv2i16.nxv2f16(<vscale x 2 x half>, metadata)
215 define <vscale x 2 x i16> @vfptoui_nxv2f16_nxv2i16(<vscale x 2 x half> %va) strictfp {
216 ; CHECK-LABEL: vfptoui_nxv2f16_nxv2i16:
218 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
219 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
221 %evec = call <vscale x 2 x i16> @llvm.experimental.constrained.fptoui.nxv2i16.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
222 ret <vscale x 2 x i16> %evec
225 declare <vscale x 2 x i32> @llvm.experimental.constrained.fptosi.nxv2i32.nxv2f16(<vscale x 2 x half>, metadata)
226 define <vscale x 2 x i32> @vfptosi_nxv2f16_nxv2i32(<vscale x 2 x half> %va) strictfp {
227 ; CHECK-LABEL: vfptosi_nxv2f16_nxv2i32:
229 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
230 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v9, v8
231 ; CHECK-NEXT: vmv1r.v v8, v9
233 %evec = call <vscale x 2 x i32> @llvm.experimental.constrained.fptosi.nxv2i32.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
234 ret <vscale x 2 x i32> %evec
237 declare <vscale x 2 x i32> @llvm.experimental.constrained.fptoui.nxv2i32.nxv2f16(<vscale x 2 x half>, metadata)
238 define <vscale x 2 x i32> @vfptoui_nxv2f16_nxv2i32(<vscale x 2 x half> %va) strictfp {
239 ; CHECK-LABEL: vfptoui_nxv2f16_nxv2i32:
241 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
242 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v9, v8
243 ; CHECK-NEXT: vmv1r.v v8, v9
245 %evec = call <vscale x 2 x i32> @llvm.experimental.constrained.fptoui.nxv2i32.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
246 ret <vscale x 2 x i32> %evec
249 declare <vscale x 2 x i64> @llvm.experimental.constrained.fptosi.nxv2i64.nxv2f16(<vscale x 2 x half>, metadata)
250 define <vscale x 2 x i64> @vfptosi_nxv2f16_nxv2i64(<vscale x 2 x half> %va) strictfp {
251 ; CHECK-LABEL: vfptosi_nxv2f16_nxv2i64:
253 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
254 ; CHECK-NEXT: vfwcvt.f.f.v v10, v8
255 ; CHECK-NEXT: vsetvli zero, zero, e32, m1, ta, ma
256 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v8, v10
258 %evec = call <vscale x 2 x i64> @llvm.experimental.constrained.fptosi.nxv2i64.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
259 ret <vscale x 2 x i64> %evec
262 declare <vscale x 2 x i64> @llvm.experimental.constrained.fptoui.nxv2i64.nxv2f16(<vscale x 2 x half>, metadata)
263 define <vscale x 2 x i64> @vfptoui_nxv2f16_nxv2i64(<vscale x 2 x half> %va) strictfp {
264 ; CHECK-LABEL: vfptoui_nxv2f16_nxv2i64:
266 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
267 ; CHECK-NEXT: vfwcvt.f.f.v v10, v8
268 ; CHECK-NEXT: vsetvli zero, zero, e32, m1, ta, ma
269 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v8, v10
271 %evec = call <vscale x 2 x i64> @llvm.experimental.constrained.fptoui.nxv2i64.nxv2f16(<vscale x 2 x half> %va, metadata !"fpexcept.strict")
272 ret <vscale x 2 x i64> %evec
275 declare <vscale x 4 x i1> @llvm.experimental.constrained.fptosi.nxv4i1.nxv4f16(<vscale x 4 x half>, metadata)
276 define <vscale x 4 x i1> @vfptosi_nxv4f16_nxv4i1(<vscale x 4 x half> %va) strictfp {
277 ; CHECK-LABEL: vfptosi_nxv4f16_nxv4i1:
279 ; CHECK-NEXT: vsetvli a0, zero, e8, mf2, ta, ma
280 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
281 ; CHECK-NEXT: vand.vi v8, v9, 1
282 ; CHECK-NEXT: vmsne.vi v0, v8, 0
284 %evec = call <vscale x 4 x i1> @llvm.experimental.constrained.fptosi.nxv4i1.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
285 ret <vscale x 4 x i1> %evec
288 declare <vscale x 4 x i1> @llvm.experimental.constrained.fptoui.nxv4i1.nxv4f16(<vscale x 4 x half>, metadata)
289 define <vscale x 4 x i1> @vfptoui_nxv4f16_nxv4i1(<vscale x 4 x half> %va) strictfp {
290 ; CHECK-LABEL: vfptoui_nxv4f16_nxv4i1:
292 ; CHECK-NEXT: vsetvli a0, zero, e8, mf2, ta, ma
293 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
294 ; CHECK-NEXT: vand.vi v8, v9, 1
295 ; CHECK-NEXT: vmsne.vi v0, v8, 0
297 %evec = call <vscale x 4 x i1> @llvm.experimental.constrained.fptoui.nxv4i1.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
298 ret <vscale x 4 x i1> %evec
301 declare <vscale x 4 x i8> @llvm.experimental.constrained.fptosi.nxv4i8.nxv4f16(<vscale x 4 x half>, metadata)
302 define <vscale x 4 x i8> @vfptosi_nxv4f16_nxv4i8(<vscale x 4 x half> %va) strictfp {
303 ; CHECK-LABEL: vfptosi_nxv4f16_nxv4i8:
305 ; CHECK-NEXT: vsetvli a0, zero, e8, mf2, ta, ma
306 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
307 ; CHECK-NEXT: vmv1r.v v8, v9
309 %evec = call <vscale x 4 x i8> @llvm.experimental.constrained.fptosi.nxv4i8.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
310 ret <vscale x 4 x i8> %evec
313 declare <vscale x 4 x i8> @llvm.experimental.constrained.fptoui.nxv4i8.nxv4f16(<vscale x 4 x half>, metadata)
314 define <vscale x 4 x i8> @vfptoui_nxv4f16_nxv4i8(<vscale x 4 x half> %va) strictfp {
315 ; CHECK-LABEL: vfptoui_nxv4f16_nxv4i8:
317 ; CHECK-NEXT: vsetvli a0, zero, e8, mf2, ta, ma
318 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
319 ; CHECK-NEXT: vmv1r.v v8, v9
321 %evec = call <vscale x 4 x i8> @llvm.experimental.constrained.fptoui.nxv4i8.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
322 ret <vscale x 4 x i8> %evec
325 declare <vscale x 4 x i16> @llvm.experimental.constrained.fptosi.nxv4i16.nxv4f16(<vscale x 4 x half>, metadata)
326 define <vscale x 4 x i16> @vfptosi_nxv4f16_nxv4i16(<vscale x 4 x half> %va) strictfp {
327 ; CHECK-LABEL: vfptosi_nxv4f16_nxv4i16:
329 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
330 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
332 %evec = call <vscale x 4 x i16> @llvm.experimental.constrained.fptosi.nxv4i16.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
333 ret <vscale x 4 x i16> %evec
336 declare <vscale x 4 x i16> @llvm.experimental.constrained.fptoui.nxv4i16.nxv4f16(<vscale x 4 x half>, metadata)
337 define <vscale x 4 x i16> @vfptoui_nxv4f16_nxv4i16(<vscale x 4 x half> %va) strictfp {
338 ; CHECK-LABEL: vfptoui_nxv4f16_nxv4i16:
340 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
341 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
343 %evec = call <vscale x 4 x i16> @llvm.experimental.constrained.fptoui.nxv4i16.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
344 ret <vscale x 4 x i16> %evec
347 declare <vscale x 4 x i32> @llvm.experimental.constrained.fptosi.nxv4i32.nxv4f16(<vscale x 4 x half>, metadata)
348 define <vscale x 4 x i32> @vfptosi_nxv4f16_nxv4i32(<vscale x 4 x half> %va) strictfp {
349 ; CHECK-LABEL: vfptosi_nxv4f16_nxv4i32:
351 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
352 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v10, v8
353 ; CHECK-NEXT: vmv2r.v v8, v10
355 %evec = call <vscale x 4 x i32> @llvm.experimental.constrained.fptosi.nxv4i32.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
356 ret <vscale x 4 x i32> %evec
359 declare <vscale x 4 x i32> @llvm.experimental.constrained.fptoui.nxv4i32.nxv4f16(<vscale x 4 x half>, metadata)
360 define <vscale x 4 x i32> @vfptoui_nxv4f16_nxv4i32(<vscale x 4 x half> %va) strictfp {
361 ; CHECK-LABEL: vfptoui_nxv4f16_nxv4i32:
363 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
364 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v10, v8
365 ; CHECK-NEXT: vmv2r.v v8, v10
367 %evec = call <vscale x 4 x i32> @llvm.experimental.constrained.fptoui.nxv4i32.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
368 ret <vscale x 4 x i32> %evec
371 declare <vscale x 4 x i64> @llvm.experimental.constrained.fptosi.nxv4i64.nxv4f16(<vscale x 4 x half>, metadata)
372 define <vscale x 4 x i64> @vfptosi_nxv4f16_nxv4i64(<vscale x 4 x half> %va) strictfp {
373 ; CHECK-LABEL: vfptosi_nxv4f16_nxv4i64:
375 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
376 ; CHECK-NEXT: vfwcvt.f.f.v v12, v8
377 ; CHECK-NEXT: vsetvli zero, zero, e32, m2, ta, ma
378 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v8, v12
380 %evec = call <vscale x 4 x i64> @llvm.experimental.constrained.fptosi.nxv4i64.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
381 ret <vscale x 4 x i64> %evec
384 declare <vscale x 4 x i64> @llvm.experimental.constrained.fptoui.nxv4i64.nxv4f16(<vscale x 4 x half>, metadata)
385 define <vscale x 4 x i64> @vfptoui_nxv4f16_nxv4i64(<vscale x 4 x half> %va) strictfp {
386 ; CHECK-LABEL: vfptoui_nxv4f16_nxv4i64:
388 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
389 ; CHECK-NEXT: vfwcvt.f.f.v v12, v8
390 ; CHECK-NEXT: vsetvli zero, zero, e32, m2, ta, ma
391 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v8, v12
393 %evec = call <vscale x 4 x i64> @llvm.experimental.constrained.fptoui.nxv4i64.nxv4f16(<vscale x 4 x half> %va, metadata !"fpexcept.strict")
394 ret <vscale x 4 x i64> %evec
397 declare <vscale x 8 x i1> @llvm.experimental.constrained.fptosi.nxv8i1.nxv8f16(<vscale x 8 x half>, metadata)
398 define <vscale x 8 x i1> @vfptosi_nxv8f16_nxv8i1(<vscale x 8 x half> %va) strictfp {
399 ; CHECK-LABEL: vfptosi_nxv8f16_nxv8i1:
401 ; CHECK-NEXT: vsetvli a0, zero, e8, m1, ta, ma
402 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
403 ; CHECK-NEXT: vand.vi v8, v10, 1
404 ; CHECK-NEXT: vmsne.vi v0, v8, 0
406 %evec = call <vscale x 8 x i1> @llvm.experimental.constrained.fptosi.nxv8i1.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
407 ret <vscale x 8 x i1> %evec
410 declare <vscale x 8 x i1> @llvm.experimental.constrained.fptoui.nxv8i1.nxv8f16(<vscale x 8 x half>, metadata)
411 define <vscale x 8 x i1> @vfptoui_nxv8f16_nxv8i1(<vscale x 8 x half> %va) strictfp {
412 ; CHECK-LABEL: vfptoui_nxv8f16_nxv8i1:
414 ; CHECK-NEXT: vsetvli a0, zero, e8, m1, ta, ma
415 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
416 ; CHECK-NEXT: vand.vi v8, v10, 1
417 ; CHECK-NEXT: vmsne.vi v0, v8, 0
419 %evec = call <vscale x 8 x i1> @llvm.experimental.constrained.fptoui.nxv8i1.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
420 ret <vscale x 8 x i1> %evec
423 declare <vscale x 8 x i8> @llvm.experimental.constrained.fptosi.nxv8i8.nxv8f16(<vscale x 8 x half>, metadata)
424 define <vscale x 8 x i8> @vfptosi_nxv8f16_nxv8i8(<vscale x 8 x half> %va) strictfp {
425 ; CHECK-LABEL: vfptosi_nxv8f16_nxv8i8:
427 ; CHECK-NEXT: vsetvli a0, zero, e8, m1, ta, ma
428 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
429 ; CHECK-NEXT: vmv.v.v v8, v10
431 %evec = call <vscale x 8 x i8> @llvm.experimental.constrained.fptosi.nxv8i8.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
432 ret <vscale x 8 x i8> %evec
435 declare <vscale x 8 x i8> @llvm.experimental.constrained.fptoui.nxv8i8.nxv8f16(<vscale x 8 x half>, metadata)
436 define <vscale x 8 x i8> @vfptoui_nxv8f16_nxv8i8(<vscale x 8 x half> %va) strictfp {
437 ; CHECK-LABEL: vfptoui_nxv8f16_nxv8i8:
439 ; CHECK-NEXT: vsetvli a0, zero, e8, m1, ta, ma
440 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
441 ; CHECK-NEXT: vmv.v.v v8, v10
443 %evec = call <vscale x 8 x i8> @llvm.experimental.constrained.fptoui.nxv8i8.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
444 ret <vscale x 8 x i8> %evec
447 declare <vscale x 8 x i16> @llvm.experimental.constrained.fptosi.nxv8i16.nxv8f16(<vscale x 8 x half>, metadata)
448 define <vscale x 8 x i16> @vfptosi_nxv8f16_nxv8i16(<vscale x 8 x half> %va) strictfp {
449 ; CHECK-LABEL: vfptosi_nxv8f16_nxv8i16:
451 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
452 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
454 %evec = call <vscale x 8 x i16> @llvm.experimental.constrained.fptosi.nxv8i16.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
455 ret <vscale x 8 x i16> %evec
458 declare <vscale x 8 x i16> @llvm.experimental.constrained.fptoui.nxv8i16.nxv8f16(<vscale x 8 x half>, metadata)
459 define <vscale x 8 x i16> @vfptoui_nxv8f16_nxv8i16(<vscale x 8 x half> %va) strictfp {
460 ; CHECK-LABEL: vfptoui_nxv8f16_nxv8i16:
462 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
463 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
465 %evec = call <vscale x 8 x i16> @llvm.experimental.constrained.fptoui.nxv8i16.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
466 ret <vscale x 8 x i16> %evec
469 declare <vscale x 8 x i32> @llvm.experimental.constrained.fptosi.nxv8i32.nxv8f16(<vscale x 8 x half>, metadata)
470 define <vscale x 8 x i32> @vfptosi_nxv8f16_nxv8i32(<vscale x 8 x half> %va) strictfp {
471 ; CHECK-LABEL: vfptosi_nxv8f16_nxv8i32:
473 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
474 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v12, v8
475 ; CHECK-NEXT: vmv4r.v v8, v12
477 %evec = call <vscale x 8 x i32> @llvm.experimental.constrained.fptosi.nxv8i32.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
478 ret <vscale x 8 x i32> %evec
481 declare <vscale x 8 x i32> @llvm.experimental.constrained.fptoui.nxv8i32.nxv8f16(<vscale x 8 x half>, metadata)
482 define <vscale x 8 x i32> @vfptoui_nxv8f16_nxv8i32(<vscale x 8 x half> %va) strictfp {
483 ; CHECK-LABEL: vfptoui_nxv8f16_nxv8i32:
485 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
486 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v12, v8
487 ; CHECK-NEXT: vmv4r.v v8, v12
489 %evec = call <vscale x 8 x i32> @llvm.experimental.constrained.fptoui.nxv8i32.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
490 ret <vscale x 8 x i32> %evec
493 declare <vscale x 8 x i64> @llvm.experimental.constrained.fptosi.nxv8i64.nxv8f16(<vscale x 8 x half>, metadata)
494 define <vscale x 8 x i64> @vfptosi_nxv8f16_nxv8i64(<vscale x 8 x half> %va) strictfp {
495 ; CHECK-LABEL: vfptosi_nxv8f16_nxv8i64:
497 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
498 ; CHECK-NEXT: vfwcvt.f.f.v v16, v8
499 ; CHECK-NEXT: vsetvli zero, zero, e32, m4, ta, ma
500 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v8, v16
502 %evec = call <vscale x 8 x i64> @llvm.experimental.constrained.fptosi.nxv8i64.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
503 ret <vscale x 8 x i64> %evec
506 declare <vscale x 8 x i64> @llvm.experimental.constrained.fptoui.nxv8i64.nxv8f16(<vscale x 8 x half>, metadata)
507 define <vscale x 8 x i64> @vfptoui_nxv8f16_nxv8i64(<vscale x 8 x half> %va) strictfp {
508 ; CHECK-LABEL: vfptoui_nxv8f16_nxv8i64:
510 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
511 ; CHECK-NEXT: vfwcvt.f.f.v v16, v8
512 ; CHECK-NEXT: vsetvli zero, zero, e32, m4, ta, ma
513 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v8, v16
515 %evec = call <vscale x 8 x i64> @llvm.experimental.constrained.fptoui.nxv8i64.nxv8f16(<vscale x 8 x half> %va, metadata !"fpexcept.strict")
516 ret <vscale x 8 x i64> %evec
519 declare <vscale x 16 x i1> @llvm.experimental.constrained.fptosi.nxv16i1.nxv16f16(<vscale x 16 x half>, metadata)
520 define <vscale x 16 x i1> @vfptosi_nxv16f16_nxv16i1(<vscale x 16 x half> %va) strictfp {
521 ; CHECK-LABEL: vfptosi_nxv16f16_nxv16i1:
523 ; CHECK-NEXT: vsetvli a0, zero, e8, m2, ta, ma
524 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
525 ; CHECK-NEXT: vand.vi v8, v12, 1
526 ; CHECK-NEXT: vmsne.vi v0, v8, 0
528 %evec = call <vscale x 16 x i1> @llvm.experimental.constrained.fptosi.nxv16i1.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
529 ret <vscale x 16 x i1> %evec
532 declare <vscale x 16 x i1> @llvm.experimental.constrained.fptoui.nxv16i1.nxv16f16(<vscale x 16 x half>, metadata)
533 define <vscale x 16 x i1> @vfptoui_nxv16f16_nxv16i1(<vscale x 16 x half> %va) strictfp {
534 ; CHECK-LABEL: vfptoui_nxv16f16_nxv16i1:
536 ; CHECK-NEXT: vsetvli a0, zero, e8, m2, ta, ma
537 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
538 ; CHECK-NEXT: vand.vi v8, v12, 1
539 ; CHECK-NEXT: vmsne.vi v0, v8, 0
541 %evec = call <vscale x 16 x i1> @llvm.experimental.constrained.fptoui.nxv16i1.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
542 ret <vscale x 16 x i1> %evec
545 declare <vscale x 16 x i8> @llvm.experimental.constrained.fptosi.nxv16i8.nxv16f16(<vscale x 16 x half>, metadata)
546 define <vscale x 16 x i8> @vfptosi_nxv16f16_nxv16i8(<vscale x 16 x half> %va) strictfp {
547 ; CHECK-LABEL: vfptosi_nxv16f16_nxv16i8:
549 ; CHECK-NEXT: vsetvli a0, zero, e8, m2, ta, ma
550 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
551 ; CHECK-NEXT: vmv.v.v v8, v12
553 %evec = call <vscale x 16 x i8> @llvm.experimental.constrained.fptosi.nxv16i8.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
554 ret <vscale x 16 x i8> %evec
557 declare <vscale x 16 x i8> @llvm.experimental.constrained.fptoui.nxv16i8.nxv16f16(<vscale x 16 x half>, metadata)
558 define <vscale x 16 x i8> @vfptoui_nxv16f16_nxv16i8(<vscale x 16 x half> %va) strictfp {
559 ; CHECK-LABEL: vfptoui_nxv16f16_nxv16i8:
561 ; CHECK-NEXT: vsetvli a0, zero, e8, m2, ta, ma
562 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
563 ; CHECK-NEXT: vmv.v.v v8, v12
565 %evec = call <vscale x 16 x i8> @llvm.experimental.constrained.fptoui.nxv16i8.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
566 ret <vscale x 16 x i8> %evec
569 declare <vscale x 16 x i16> @llvm.experimental.constrained.fptosi.nxv16i16.nxv16f16(<vscale x 16 x half>, metadata)
570 define <vscale x 16 x i16> @vfptosi_nxv16f16_nxv16i16(<vscale x 16 x half> %va) strictfp {
571 ; CHECK-LABEL: vfptosi_nxv16f16_nxv16i16:
573 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
574 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
576 %evec = call <vscale x 16 x i16> @llvm.experimental.constrained.fptosi.nxv16i16.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
577 ret <vscale x 16 x i16> %evec
580 declare <vscale x 16 x i16> @llvm.experimental.constrained.fptoui.nxv16i16.nxv16f16(<vscale x 16 x half>, metadata)
581 define <vscale x 16 x i16> @vfptoui_nxv16f16_nxv16i16(<vscale x 16 x half> %va) strictfp {
582 ; CHECK-LABEL: vfptoui_nxv16f16_nxv16i16:
584 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
585 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
587 %evec = call <vscale x 16 x i16> @llvm.experimental.constrained.fptoui.nxv16i16.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
588 ret <vscale x 16 x i16> %evec
591 declare <vscale x 16 x i32> @llvm.experimental.constrained.fptosi.nxv16i32.nxv16f16(<vscale x 16 x half>, metadata)
592 define <vscale x 16 x i32> @vfptosi_nxv16f16_nxv16i32(<vscale x 16 x half> %va) strictfp {
593 ; CHECK-LABEL: vfptosi_nxv16f16_nxv16i32:
595 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
596 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v16, v8
597 ; CHECK-NEXT: vmv8r.v v8, v16
599 %evec = call <vscale x 16 x i32> @llvm.experimental.constrained.fptosi.nxv16i32.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
600 ret <vscale x 16 x i32> %evec
603 declare <vscale x 16 x i32> @llvm.experimental.constrained.fptoui.nxv16i32.nxv16f16(<vscale x 16 x half>, metadata)
604 define <vscale x 16 x i32> @vfptoui_nxv16f16_nxv16i32(<vscale x 16 x half> %va) strictfp {
605 ; CHECK-LABEL: vfptoui_nxv16f16_nxv16i32:
607 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
608 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v16, v8
609 ; CHECK-NEXT: vmv8r.v v8, v16
611 %evec = call <vscale x 16 x i32> @llvm.experimental.constrained.fptoui.nxv16i32.nxv16f16(<vscale x 16 x half> %va, metadata !"fpexcept.strict")
612 ret <vscale x 16 x i32> %evec
615 declare <vscale x 32 x i1> @llvm.experimental.constrained.fptosi.nxv32i1.nxv32f16(<vscale x 32 x half>, metadata)
616 define <vscale x 32 x i1> @vfptosi_nxv32f16_nxv32i1(<vscale x 32 x half> %va) strictfp {
617 ; CHECK-LABEL: vfptosi_nxv32f16_nxv32i1:
619 ; CHECK-NEXT: vsetvli a0, zero, e8, m4, ta, ma
620 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
621 ; CHECK-NEXT: vand.vi v8, v16, 1
622 ; CHECK-NEXT: vmsne.vi v0, v8, 0
624 %evec = call <vscale x 32 x i1> @llvm.experimental.constrained.fptosi.nxv32i1.nxv32f16(<vscale x 32 x half> %va, metadata !"fpexcept.strict")
625 ret <vscale x 32 x i1> %evec
628 declare <vscale x 32 x i1> @llvm.experimental.constrained.fptoui.nxv32i1.nxv32f16(<vscale x 32 x half>, metadata)
629 define <vscale x 32 x i1> @vfptoui_nxv32f16_nxv32i1(<vscale x 32 x half> %va) strictfp {
630 ; CHECK-LABEL: vfptoui_nxv32f16_nxv32i1:
632 ; CHECK-NEXT: vsetvli a0, zero, e8, m4, ta, ma
633 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
634 ; CHECK-NEXT: vand.vi v8, v16, 1
635 ; CHECK-NEXT: vmsne.vi v0, v8, 0
637 %evec = call <vscale x 32 x i1> @llvm.experimental.constrained.fptoui.nxv32i1.nxv32f16(<vscale x 32 x half> %va, metadata !"fpexcept.strict")
638 ret <vscale x 32 x i1> %evec
641 declare <vscale x 32 x i8> @llvm.experimental.constrained.fptosi.nxv32i8.nxv32f16(<vscale x 32 x half>, metadata)
642 define <vscale x 32 x i8> @vfptosi_nxv32f16_nxv32i8(<vscale x 32 x half> %va) strictfp {
643 ; CHECK-LABEL: vfptosi_nxv32f16_nxv32i8:
645 ; CHECK-NEXT: vsetvli a0, zero, e8, m4, ta, ma
646 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
647 ; CHECK-NEXT: vmv.v.v v8, v16
649 %evec = call <vscale x 32 x i8> @llvm.experimental.constrained.fptosi.nxv32i8.nxv32f16(<vscale x 32 x half> %va, metadata !"fpexcept.strict")
650 ret <vscale x 32 x i8> %evec
653 declare <vscale x 32 x i8> @llvm.experimental.constrained.fptoui.nxv32i8.nxv32f16(<vscale x 32 x half>, metadata)
654 define <vscale x 32 x i8> @vfptoui_nxv32f16_nxv32i8(<vscale x 32 x half> %va) strictfp {
655 ; CHECK-LABEL: vfptoui_nxv32f16_nxv32i8:
657 ; CHECK-NEXT: vsetvli a0, zero, e8, m4, ta, ma
658 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
659 ; CHECK-NEXT: vmv.v.v v8, v16
661 %evec = call <vscale x 32 x i8> @llvm.experimental.constrained.fptoui.nxv32i8.nxv32f16(<vscale x 32 x half> %va, metadata !"fpexcept.strict")
662 ret <vscale x 32 x i8> %evec
665 declare <vscale x 32 x i16> @llvm.experimental.constrained.fptosi.nxv32i16.nxv32f16(<vscale x 32 x half>, metadata)
666 define <vscale x 32 x i16> @vfptosi_nxv32f16_nxv32i16(<vscale x 32 x half> %va) strictfp {
667 ; CHECK-LABEL: vfptosi_nxv32f16_nxv32i16:
669 ; CHECK-NEXT: vsetvli a0, zero, e16, m8, ta, ma
670 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
672 %evec = call <vscale x 32 x i16> @llvm.experimental.constrained.fptosi.nxv32i16.nxv32f16(<vscale x 32 x half> %va, metadata !"fpexcept.strict")
673 ret <vscale x 32 x i16> %evec
676 declare <vscale x 32 x i16> @llvm.experimental.constrained.fptoui.nxv32i16.nxv32f16(<vscale x 32 x half>, metadata)
677 define <vscale x 32 x i16> @vfptoui_nxv32f16_nxv32i16(<vscale x 32 x half> %va) strictfp {
678 ; CHECK-LABEL: vfptoui_nxv32f16_nxv32i16:
680 ; CHECK-NEXT: vsetvli a0, zero, e16, m8, ta, ma
681 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
683 %evec = call <vscale x 32 x i16> @llvm.experimental.constrained.fptoui.nxv32i16.nxv32f16(<vscale x 32 x half> %va, metadata !"fpexcept.strict")
684 ret <vscale x 32 x i16> %evec
; Strict-FP (strictfp) fptosi/fptoui from <vscale x 1 x float> to each integer
; element type. Codegen patterns checked: i1 = narrowing convert then
; vand.vi/vmsne.vi to form the mask in v0; i8 = narrowing convert to i16 then a
; vnsrl.wi truncation; i16 = single narrowing vfncvt; i32 = same-width vfcvt;
; i64 = widening vfwcvt.
687 declare <vscale x 1 x i1> @llvm.experimental.constrained.fptosi.nxv1i1.nxv1f32(<vscale x 1 x float>, metadata)
688 define <vscale x 1 x i1> @vfptosi_nxv1f32_nxv1i1(<vscale x 1 x float> %va) strictfp {
689 ; CHECK-LABEL: vfptosi_nxv1f32_nxv1i1:
691 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
692 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
693 ; CHECK-NEXT: vand.vi v8, v9, 1
694 ; CHECK-NEXT: vmsne.vi v0, v8, 0
696 %evec = call <vscale x 1 x i1> @llvm.experimental.constrained.fptosi.nxv1i1.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
697 ret <vscale x 1 x i1> %evec
700 declare <vscale x 1 x i1> @llvm.experimental.constrained.fptoui.nxv1i1.nxv1f32(<vscale x 1 x float>, metadata)
701 define <vscale x 1 x i1> @vfptoui_nxv1f32_nxv1i1(<vscale x 1 x float> %va) strictfp {
702 ; CHECK-LABEL: vfptoui_nxv1f32_nxv1i1:
704 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
705 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
706 ; CHECK-NEXT: vand.vi v8, v9, 1
707 ; CHECK-NEXT: vmsne.vi v0, v8, 0
709 %evec = call <vscale x 1 x i1> @llvm.experimental.constrained.fptoui.nxv1i1.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
710 ret <vscale x 1 x i1> %evec
; i8 results need an extra vnsrl.wi step since vfncvt can only halve the width.
713 declare <vscale x 1 x i8> @llvm.experimental.constrained.fptosi.nxv1i8.nxv1f32(<vscale x 1 x float>, metadata)
714 define <vscale x 1 x i8> @vfptosi_nxv1f32_nxv1i8(<vscale x 1 x float> %va) strictfp {
715 ; CHECK-LABEL: vfptosi_nxv1f32_nxv1i8:
717 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
718 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
719 ; CHECK-NEXT: vsetvli zero, zero, e8, mf8, ta, ma
720 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
722 %evec = call <vscale x 1 x i8> @llvm.experimental.constrained.fptosi.nxv1i8.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
723 ret <vscale x 1 x i8> %evec
726 declare <vscale x 1 x i8> @llvm.experimental.constrained.fptoui.nxv1i8.nxv1f32(<vscale x 1 x float>, metadata)
727 define <vscale x 1 x i8> @vfptoui_nxv1f32_nxv1i8(<vscale x 1 x float> %va) strictfp {
728 ; CHECK-LABEL: vfptoui_nxv1f32_nxv1i8:
730 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
731 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
732 ; CHECK-NEXT: vsetvli zero, zero, e8, mf8, ta, ma
733 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
735 %evec = call <vscale x 1 x i8> @llvm.experimental.constrained.fptoui.nxv1i8.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
736 ret <vscale x 1 x i8> %evec
739 declare <vscale x 1 x i16> @llvm.experimental.constrained.fptosi.nxv1i16.nxv1f32(<vscale x 1 x float>, metadata)
740 define <vscale x 1 x i16> @vfptosi_nxv1f32_nxv1i16(<vscale x 1 x float> %va) strictfp {
741 ; CHECK-LABEL: vfptosi_nxv1f32_nxv1i16:
743 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
744 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
745 ; CHECK-NEXT: vmv1r.v v8, v9
747 %evec = call <vscale x 1 x i16> @llvm.experimental.constrained.fptosi.nxv1i16.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
748 ret <vscale x 1 x i16> %evec
751 declare <vscale x 1 x i16> @llvm.experimental.constrained.fptoui.nxv1i16.nxv1f32(<vscale x 1 x float>, metadata)
752 define <vscale x 1 x i16> @vfptoui_nxv1f32_nxv1i16(<vscale x 1 x float> %va) strictfp {
753 ; CHECK-LABEL: vfptoui_nxv1f32_nxv1i16:
755 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
756 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
757 ; CHECK-NEXT: vmv1r.v v8, v9
759 %evec = call <vscale x 1 x i16> @llvm.experimental.constrained.fptoui.nxv1i16.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
760 ret <vscale x 1 x i16> %evec
763 declare <vscale x 1 x i32> @llvm.experimental.constrained.fptosi.nxv1i32.nxv1f32(<vscale x 1 x float>, metadata)
764 define <vscale x 1 x i32> @vfptosi_nxv1f32_nxv1i32(<vscale x 1 x float> %va) strictfp {
765 ; CHECK-LABEL: vfptosi_nxv1f32_nxv1i32:
767 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
768 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
770 %evec = call <vscale x 1 x i32> @llvm.experimental.constrained.fptosi.nxv1i32.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
771 ret <vscale x 1 x i32> %evec
774 declare <vscale x 1 x i32> @llvm.experimental.constrained.fptoui.nxv1i32.nxv1f32(<vscale x 1 x float>, metadata)
775 define <vscale x 1 x i32> @vfptoui_nxv1f32_nxv1i32(<vscale x 1 x float> %va) strictfp {
776 ; CHECK-LABEL: vfptoui_nxv1f32_nxv1i32:
778 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
779 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
781 %evec = call <vscale x 1 x i32> @llvm.experimental.constrained.fptoui.nxv1i32.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
782 ret <vscale x 1 x i32> %evec
; i64 is wider than f32, so codegen uses the widening vfwcvt form.
785 declare <vscale x 1 x i64> @llvm.experimental.constrained.fptosi.nxv1i64.nxv1f32(<vscale x 1 x float>, metadata)
786 define <vscale x 1 x i64> @vfptosi_nxv1f32_nxv1i64(<vscale x 1 x float> %va) strictfp {
787 ; CHECK-LABEL: vfptosi_nxv1f32_nxv1i64:
789 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
790 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v9, v8
791 ; CHECK-NEXT: vmv1r.v v8, v9
793 %evec = call <vscale x 1 x i64> @llvm.experimental.constrained.fptosi.nxv1i64.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
794 ret <vscale x 1 x i64> %evec
797 declare <vscale x 1 x i64> @llvm.experimental.constrained.fptoui.nxv1i64.nxv1f32(<vscale x 1 x float>, metadata)
798 define <vscale x 1 x i64> @vfptoui_nxv1f32_nxv1i64(<vscale x 1 x float> %va) strictfp {
799 ; CHECK-LABEL: vfptoui_nxv1f32_nxv1i64:
801 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
802 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v9, v8
803 ; CHECK-NEXT: vmv1r.v v8, v9
805 %evec = call <vscale x 1 x i64> @llvm.experimental.constrained.fptoui.nxv1i64.nxv1f32(<vscale x 1 x float> %va, metadata !"fpexcept.strict")
806 ret <vscale x 1 x i64> %evec
; Strict-FP fptosi/fptoui from <vscale x 2 x float>; same per-element-type
; patterns as the nxv1f32 cases, at doubled LMUL (e.g. widening to i64 now
; produces an m2 result, copied back with vmv2r.v).
809 declare <vscale x 2 x i1> @llvm.experimental.constrained.fptosi.nxv2i1.nxv2f32(<vscale x 2 x float>, metadata)
810 define <vscale x 2 x i1> @vfptosi_nxv2f32_nxv2i1(<vscale x 2 x float> %va) strictfp {
811 ; CHECK-LABEL: vfptosi_nxv2f32_nxv2i1:
813 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
814 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
815 ; CHECK-NEXT: vand.vi v8, v9, 1
816 ; CHECK-NEXT: vmsne.vi v0, v8, 0
818 %evec = call <vscale x 2 x i1> @llvm.experimental.constrained.fptosi.nxv2i1.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
819 ret <vscale x 2 x i1> %evec
822 declare <vscale x 2 x i1> @llvm.experimental.constrained.fptoui.nxv2i1.nxv2f32(<vscale x 2 x float>, metadata)
823 define <vscale x 2 x i1> @vfptoui_nxv2f32_nxv2i1(<vscale x 2 x float> %va) strictfp {
824 ; CHECK-LABEL: vfptoui_nxv2f32_nxv2i1:
826 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
827 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
828 ; CHECK-NEXT: vand.vi v8, v9, 1
829 ; CHECK-NEXT: vmsne.vi v0, v8, 0
831 %evec = call <vscale x 2 x i1> @llvm.experimental.constrained.fptoui.nxv2i1.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
832 ret <vscale x 2 x i1> %evec
835 declare <vscale x 2 x i8> @llvm.experimental.constrained.fptosi.nxv2i8.nxv2f32(<vscale x 2 x float>, metadata)
836 define <vscale x 2 x i8> @vfptosi_nxv2f32_nxv2i8(<vscale x 2 x float> %va) strictfp {
837 ; CHECK-LABEL: vfptosi_nxv2f32_nxv2i8:
839 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
840 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
841 ; CHECK-NEXT: vsetvli zero, zero, e8, mf4, ta, ma
842 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
844 %evec = call <vscale x 2 x i8> @llvm.experimental.constrained.fptosi.nxv2i8.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
845 ret <vscale x 2 x i8> %evec
848 declare <vscale x 2 x i8> @llvm.experimental.constrained.fptoui.nxv2i8.nxv2f32(<vscale x 2 x float>, metadata)
849 define <vscale x 2 x i8> @vfptoui_nxv2f32_nxv2i8(<vscale x 2 x float> %va) strictfp {
850 ; CHECK-LABEL: vfptoui_nxv2f32_nxv2i8:
852 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
853 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
854 ; CHECK-NEXT: vsetvli zero, zero, e8, mf4, ta, ma
855 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
857 %evec = call <vscale x 2 x i8> @llvm.experimental.constrained.fptoui.nxv2i8.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
858 ret <vscale x 2 x i8> %evec
861 declare <vscale x 2 x i16> @llvm.experimental.constrained.fptosi.nxv2i16.nxv2f32(<vscale x 2 x float>, metadata)
862 define <vscale x 2 x i16> @vfptosi_nxv2f32_nxv2i16(<vscale x 2 x float> %va) strictfp {
863 ; CHECK-LABEL: vfptosi_nxv2f32_nxv2i16:
865 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
866 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
867 ; CHECK-NEXT: vmv1r.v v8, v9
869 %evec = call <vscale x 2 x i16> @llvm.experimental.constrained.fptosi.nxv2i16.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
870 ret <vscale x 2 x i16> %evec
873 declare <vscale x 2 x i16> @llvm.experimental.constrained.fptoui.nxv2i16.nxv2f32(<vscale x 2 x float>, metadata)
874 define <vscale x 2 x i16> @vfptoui_nxv2f32_nxv2i16(<vscale x 2 x float> %va) strictfp {
875 ; CHECK-LABEL: vfptoui_nxv2f32_nxv2i16:
877 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
878 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
879 ; CHECK-NEXT: vmv1r.v v8, v9
881 %evec = call <vscale x 2 x i16> @llvm.experimental.constrained.fptoui.nxv2i16.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
882 ret <vscale x 2 x i16> %evec
885 declare <vscale x 2 x i32> @llvm.experimental.constrained.fptosi.nxv2i32.nxv2f32(<vscale x 2 x float>, metadata)
886 define <vscale x 2 x i32> @vfptosi_nxv2f32_nxv2i32(<vscale x 2 x float> %va) strictfp {
887 ; CHECK-LABEL: vfptosi_nxv2f32_nxv2i32:
889 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
890 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
892 %evec = call <vscale x 2 x i32> @llvm.experimental.constrained.fptosi.nxv2i32.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
893 ret <vscale x 2 x i32> %evec
896 declare <vscale x 2 x i32> @llvm.experimental.constrained.fptoui.nxv2i32.nxv2f32(<vscale x 2 x float>, metadata)
897 define <vscale x 2 x i32> @vfptoui_nxv2f32_nxv2i32(<vscale x 2 x float> %va) strictfp {
898 ; CHECK-LABEL: vfptoui_nxv2f32_nxv2i32:
900 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
901 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
903 %evec = call <vscale x 2 x i32> @llvm.experimental.constrained.fptoui.nxv2i32.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
904 ret <vscale x 2 x i32> %evec
907 declare <vscale x 2 x i64> @llvm.experimental.constrained.fptosi.nxv2i64.nxv2f32(<vscale x 2 x float>, metadata)
908 define <vscale x 2 x i64> @vfptosi_nxv2f32_nxv2i64(<vscale x 2 x float> %va) strictfp {
909 ; CHECK-LABEL: vfptosi_nxv2f32_nxv2i64:
911 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
912 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v10, v8
913 ; CHECK-NEXT: vmv2r.v v8, v10
915 %evec = call <vscale x 2 x i64> @llvm.experimental.constrained.fptosi.nxv2i64.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
916 ret <vscale x 2 x i64> %evec
919 declare <vscale x 2 x i64> @llvm.experimental.constrained.fptoui.nxv2i64.nxv2f32(<vscale x 2 x float>, metadata)
920 define <vscale x 2 x i64> @vfptoui_nxv2f32_nxv2i64(<vscale x 2 x float> %va) strictfp {
921 ; CHECK-LABEL: vfptoui_nxv2f32_nxv2i64:
923 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
924 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v10, v8
925 ; CHECK-NEXT: vmv2r.v v8, v10
927 %evec = call <vscale x 2 x i64> @llvm.experimental.constrained.fptoui.nxv2i64.nxv2f32(<vscale x 2 x float> %va, metadata !"fpexcept.strict")
928 ret <vscale x 2 x i64> %evec
; Strict-FP fptosi/fptoui from <vscale x 4 x float>; same patterns again at the
; next LMUL (narrowed results land in v10, widened i64 results occupy m4).
931 declare <vscale x 4 x i1> @llvm.experimental.constrained.fptosi.nxv4i1.nxv4f32(<vscale x 4 x float>, metadata)
932 define <vscale x 4 x i1> @vfptosi_nxv4f32_nxv4i1(<vscale x 4 x float> %va) strictfp {
933 ; CHECK-LABEL: vfptosi_nxv4f32_nxv4i1:
935 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
936 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
937 ; CHECK-NEXT: vand.vi v8, v10, 1
938 ; CHECK-NEXT: vmsne.vi v0, v8, 0
940 %evec = call <vscale x 4 x i1> @llvm.experimental.constrained.fptosi.nxv4i1.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
941 ret <vscale x 4 x i1> %evec
944 declare <vscale x 4 x i1> @llvm.experimental.constrained.fptoui.nxv4i1.nxv4f32(<vscale x 4 x float>, metadata)
945 define <vscale x 4 x i1> @vfptoui_nxv4f32_nxv4i1(<vscale x 4 x float> %va) strictfp {
946 ; CHECK-LABEL: vfptoui_nxv4f32_nxv4i1:
948 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
949 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
950 ; CHECK-NEXT: vand.vi v8, v10, 1
951 ; CHECK-NEXT: vmsne.vi v0, v8, 0
953 %evec = call <vscale x 4 x i1> @llvm.experimental.constrained.fptoui.nxv4i1.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
954 ret <vscale x 4 x i1> %evec
957 declare <vscale x 4 x i8> @llvm.experimental.constrained.fptosi.nxv4i8.nxv4f32(<vscale x 4 x float>, metadata)
958 define <vscale x 4 x i8> @vfptosi_nxv4f32_nxv4i8(<vscale x 4 x float> %va) strictfp {
959 ; CHECK-LABEL: vfptosi_nxv4f32_nxv4i8:
961 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
962 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
963 ; CHECK-NEXT: vsetvli zero, zero, e8, mf2, ta, ma
964 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
966 %evec = call <vscale x 4 x i8> @llvm.experimental.constrained.fptosi.nxv4i8.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
967 ret <vscale x 4 x i8> %evec
970 declare <vscale x 4 x i8> @llvm.experimental.constrained.fptoui.nxv4i8.nxv4f32(<vscale x 4 x float>, metadata)
971 define <vscale x 4 x i8> @vfptoui_nxv4f32_nxv4i8(<vscale x 4 x float> %va) strictfp {
972 ; CHECK-LABEL: vfptoui_nxv4f32_nxv4i8:
974 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
975 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
976 ; CHECK-NEXT: vsetvli zero, zero, e8, mf2, ta, ma
977 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
979 %evec = call <vscale x 4 x i8> @llvm.experimental.constrained.fptoui.nxv4i8.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
980 ret <vscale x 4 x i8> %evec
983 declare <vscale x 4 x i16> @llvm.experimental.constrained.fptosi.nxv4i16.nxv4f32(<vscale x 4 x float>, metadata)
984 define <vscale x 4 x i16> @vfptosi_nxv4f32_nxv4i16(<vscale x 4 x float> %va) strictfp {
985 ; CHECK-LABEL: vfptosi_nxv4f32_nxv4i16:
987 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
988 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
989 ; CHECK-NEXT: vmv.v.v v8, v10
991 %evec = call <vscale x 4 x i16> @llvm.experimental.constrained.fptosi.nxv4i16.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
992 ret <vscale x 4 x i16> %evec
995 declare <vscale x 4 x i16> @llvm.experimental.constrained.fptoui.nxv4i16.nxv4f32(<vscale x 4 x float>, metadata)
996 define <vscale x 4 x i16> @vfptoui_nxv4f32_nxv4i16(<vscale x 4 x float> %va) strictfp {
997 ; CHECK-LABEL: vfptoui_nxv4f32_nxv4i16:
999 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
1000 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
1001 ; CHECK-NEXT: vmv.v.v v8, v10
1003 %evec = call <vscale x 4 x i16> @llvm.experimental.constrained.fptoui.nxv4i16.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
1004 ret <vscale x 4 x i16> %evec
1007 declare <vscale x 4 x i32> @llvm.experimental.constrained.fptosi.nxv4i32.nxv4f32(<vscale x 4 x float>, metadata)
1008 define <vscale x 4 x i32> @vfptosi_nxv4f32_nxv4i32(<vscale x 4 x float> %va) strictfp {
1009 ; CHECK-LABEL: vfptosi_nxv4f32_nxv4i32:
1011 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1012 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
1014 %evec = call <vscale x 4 x i32> @llvm.experimental.constrained.fptosi.nxv4i32.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
1015 ret <vscale x 4 x i32> %evec
1018 declare <vscale x 4 x i32> @llvm.experimental.constrained.fptoui.nxv4i32.nxv4f32(<vscale x 4 x float>, metadata)
1019 define <vscale x 4 x i32> @vfptoui_nxv4f32_nxv4i32(<vscale x 4 x float> %va) strictfp {
1020 ; CHECK-LABEL: vfptoui_nxv4f32_nxv4i32:
1022 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1023 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
1025 %evec = call <vscale x 4 x i32> @llvm.experimental.constrained.fptoui.nxv4i32.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
1026 ret <vscale x 4 x i32> %evec
1029 declare <vscale x 4 x i64> @llvm.experimental.constrained.fptosi.nxv4i64.nxv4f32(<vscale x 4 x float>, metadata)
1030 define <vscale x 4 x i64> @vfptosi_nxv4f32_nxv4i64(<vscale x 4 x float> %va) strictfp {
1031 ; CHECK-LABEL: vfptosi_nxv4f32_nxv4i64:
1033 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1034 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v12, v8
1035 ; CHECK-NEXT: vmv4r.v v8, v12
1037 %evec = call <vscale x 4 x i64> @llvm.experimental.constrained.fptosi.nxv4i64.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
1038 ret <vscale x 4 x i64> %evec
1041 declare <vscale x 4 x i64> @llvm.experimental.constrained.fptoui.nxv4i64.nxv4f32(<vscale x 4 x float>, metadata)
1042 define <vscale x 4 x i64> @vfptoui_nxv4f32_nxv4i64(<vscale x 4 x float> %va) strictfp {
1043 ; CHECK-LABEL: vfptoui_nxv4f32_nxv4i64:
1045 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1046 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v12, v8
1047 ; CHECK-NEXT: vmv4r.v v8, v12
1049 %evec = call <vscale x 4 x i64> @llvm.experimental.constrained.fptoui.nxv4i64.nxv4f32(<vscale x 4 x float> %va, metadata !"fpexcept.strict")
1050 ret <vscale x 4 x i64> %evec
; Strict-FP fptosi/fptoui from <vscale x 8 x float>; widened i64 results now
; fill the full m8 register group (vmv8r.v copy back to v8).
1053 declare <vscale x 8 x i1> @llvm.experimental.constrained.fptosi.nxv8i1.nxv8f32(<vscale x 8 x float>, metadata)
1054 define <vscale x 8 x i1> @vfptosi_nxv8f32_nxv8i1(<vscale x 8 x float> %va) strictfp {
1055 ; CHECK-LABEL: vfptosi_nxv8f32_nxv8i1:
1057 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
1058 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
1059 ; CHECK-NEXT: vand.vi v8, v12, 1
1060 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1062 %evec = call <vscale x 8 x i1> @llvm.experimental.constrained.fptosi.nxv8i1.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1063 ret <vscale x 8 x i1> %evec
1066 declare <vscale x 8 x i1> @llvm.experimental.constrained.fptoui.nxv8i1.nxv8f32(<vscale x 8 x float>, metadata)
1067 define <vscale x 8 x i1> @vfptoui_nxv8f32_nxv8i1(<vscale x 8 x float> %va) strictfp {
1068 ; CHECK-LABEL: vfptoui_nxv8f32_nxv8i1:
1070 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
1071 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
1072 ; CHECK-NEXT: vand.vi v8, v12, 1
1073 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1075 %evec = call <vscale x 8 x i1> @llvm.experimental.constrained.fptoui.nxv8i1.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1076 ret <vscale x 8 x i1> %evec
1079 declare <vscale x 8 x i8> @llvm.experimental.constrained.fptosi.nxv8i8.nxv8f32(<vscale x 8 x float>, metadata)
1080 define <vscale x 8 x i8> @vfptosi_nxv8f32_nxv8i8(<vscale x 8 x float> %va) strictfp {
1081 ; CHECK-LABEL: vfptosi_nxv8f32_nxv8i8:
1083 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
1084 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
1085 ; CHECK-NEXT: vsetvli zero, zero, e8, m1, ta, ma
1086 ; CHECK-NEXT: vnsrl.wi v8, v12, 0
1088 %evec = call <vscale x 8 x i8> @llvm.experimental.constrained.fptosi.nxv8i8.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1089 ret <vscale x 8 x i8> %evec
1092 declare <vscale x 8 x i8> @llvm.experimental.constrained.fptoui.nxv8i8.nxv8f32(<vscale x 8 x float>, metadata)
1093 define <vscale x 8 x i8> @vfptoui_nxv8f32_nxv8i8(<vscale x 8 x float> %va) strictfp {
1094 ; CHECK-LABEL: vfptoui_nxv8f32_nxv8i8:
1096 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
1097 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
1098 ; CHECK-NEXT: vsetvli zero, zero, e8, m1, ta, ma
1099 ; CHECK-NEXT: vnsrl.wi v8, v12, 0
1101 %evec = call <vscale x 8 x i8> @llvm.experimental.constrained.fptoui.nxv8i8.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1102 ret <vscale x 8 x i8> %evec
1105 declare <vscale x 8 x i16> @llvm.experimental.constrained.fptosi.nxv8i16.nxv8f32(<vscale x 8 x float>, metadata)
1106 define <vscale x 8 x i16> @vfptosi_nxv8f32_nxv8i16(<vscale x 8 x float> %va) strictfp {
1107 ; CHECK-LABEL: vfptosi_nxv8f32_nxv8i16:
1109 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
1110 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
1111 ; CHECK-NEXT: vmv.v.v v8, v12
1113 %evec = call <vscale x 8 x i16> @llvm.experimental.constrained.fptosi.nxv8i16.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1114 ret <vscale x 8 x i16> %evec
1117 declare <vscale x 8 x i16> @llvm.experimental.constrained.fptoui.nxv8i16.nxv8f32(<vscale x 8 x float>, metadata)
1118 define <vscale x 8 x i16> @vfptoui_nxv8f32_nxv8i16(<vscale x 8 x float> %va) strictfp {
1119 ; CHECK-LABEL: vfptoui_nxv8f32_nxv8i16:
1121 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
1122 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
1123 ; CHECK-NEXT: vmv.v.v v8, v12
1125 %evec = call <vscale x 8 x i16> @llvm.experimental.constrained.fptoui.nxv8i16.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1126 ret <vscale x 8 x i16> %evec
1129 declare <vscale x 8 x i32> @llvm.experimental.constrained.fptosi.nxv8i32.nxv8f32(<vscale x 8 x float>, metadata)
1130 define <vscale x 8 x i32> @vfptosi_nxv8f32_nxv8i32(<vscale x 8 x float> %va) strictfp {
1131 ; CHECK-LABEL: vfptosi_nxv8f32_nxv8i32:
1133 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1134 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
1136 %evec = call <vscale x 8 x i32> @llvm.experimental.constrained.fptosi.nxv8i32.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1137 ret <vscale x 8 x i32> %evec
1140 declare <vscale x 8 x i32> @llvm.experimental.constrained.fptoui.nxv8i32.nxv8f32(<vscale x 8 x float>, metadata)
1141 define <vscale x 8 x i32> @vfptoui_nxv8f32_nxv8i32(<vscale x 8 x float> %va) strictfp {
1142 ; CHECK-LABEL: vfptoui_nxv8f32_nxv8i32:
1144 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1145 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
1147 %evec = call <vscale x 8 x i32> @llvm.experimental.constrained.fptoui.nxv8i32.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1148 ret <vscale x 8 x i32> %evec
1151 declare <vscale x 8 x i64> @llvm.experimental.constrained.fptosi.nxv8i64.nxv8f32(<vscale x 8 x float>, metadata)
1152 define <vscale x 8 x i64> @vfptosi_nxv8f32_nxv8i64(<vscale x 8 x float> %va) strictfp {
1153 ; CHECK-LABEL: vfptosi_nxv8f32_nxv8i64:
1155 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1156 ; CHECK-NEXT: vfwcvt.rtz.x.f.v v16, v8
1157 ; CHECK-NEXT: vmv8r.v v8, v16
1159 %evec = call <vscale x 8 x i64> @llvm.experimental.constrained.fptosi.nxv8i64.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1160 ret <vscale x 8 x i64> %evec
1163 declare <vscale x 8 x i64> @llvm.experimental.constrained.fptoui.nxv8i64.nxv8f32(<vscale x 8 x float>, metadata)
1164 define <vscale x 8 x i64> @vfptoui_nxv8f32_nxv8i64(<vscale x 8 x float> %va) strictfp {
1165 ; CHECK-LABEL: vfptoui_nxv8f32_nxv8i64:
1167 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1168 ; CHECK-NEXT: vfwcvt.rtz.xu.f.v v16, v8
1169 ; CHECK-NEXT: vmv8r.v v8, v16
1171 %evec = call <vscale x 8 x i64> @llvm.experimental.constrained.fptoui.nxv8i64.nxv8f32(<vscale x 8 x float> %va, metadata !"fpexcept.strict")
1172 ret <vscale x 8 x i64> %evec
; Strict-FP fptosi/fptoui from <vscale x 16 x float> (m8 source). No i64 cases
; appear here: the widened result would not fit in a register group.
1175 declare <vscale x 16 x i1> @llvm.experimental.constrained.fptosi.nxv16i1.nxv16f32(<vscale x 16 x float>, metadata)
1176 define <vscale x 16 x i1> @vfptosi_nxv16f32_nxv16i1(<vscale x 16 x float> %va) strictfp {
1177 ; CHECK-LABEL: vfptosi_nxv16f32_nxv16i1:
1179 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
1180 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
1181 ; CHECK-NEXT: vand.vi v8, v16, 1
1182 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1184 %evec = call <vscale x 16 x i1> @llvm.experimental.constrained.fptosi.nxv16i1.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1185 ret <vscale x 16 x i1> %evec
1188 declare <vscale x 16 x i1> @llvm.experimental.constrained.fptoui.nxv16i1.nxv16f32(<vscale x 16 x float>, metadata)
1189 define <vscale x 16 x i1> @vfptoui_nxv16f32_nxv16i1(<vscale x 16 x float> %va) strictfp {
1190 ; CHECK-LABEL: vfptoui_nxv16f32_nxv16i1:
1192 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
1193 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
1194 ; CHECK-NEXT: vand.vi v8, v16, 1
1195 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1197 %evec = call <vscale x 16 x i1> @llvm.experimental.constrained.fptoui.nxv16i1.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1198 ret <vscale x 16 x i1> %evec
1201 declare <vscale x 16 x i8> @llvm.experimental.constrained.fptosi.nxv16i8.nxv16f32(<vscale x 16 x float>, metadata)
1202 define <vscale x 16 x i8> @vfptosi_nxv16f32_nxv16i8(<vscale x 16 x float> %va) strictfp {
1203 ; CHECK-LABEL: vfptosi_nxv16f32_nxv16i8:
1205 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
1206 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
1207 ; CHECK-NEXT: vsetvli zero, zero, e8, m2, ta, ma
1208 ; CHECK-NEXT: vnsrl.wi v8, v16, 0
1210 %evec = call <vscale x 16 x i8> @llvm.experimental.constrained.fptosi.nxv16i8.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1211 ret <vscale x 16 x i8> %evec
1214 declare <vscale x 16 x i8> @llvm.experimental.constrained.fptoui.nxv16i8.nxv16f32(<vscale x 16 x float>, metadata)
1215 define <vscale x 16 x i8> @vfptoui_nxv16f32_nxv16i8(<vscale x 16 x float> %va) strictfp {
1216 ; CHECK-LABEL: vfptoui_nxv16f32_nxv16i8:
1218 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
1219 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
1220 ; CHECK-NEXT: vsetvli zero, zero, e8, m2, ta, ma
1221 ; CHECK-NEXT: vnsrl.wi v8, v16, 0
1223 %evec = call <vscale x 16 x i8> @llvm.experimental.constrained.fptoui.nxv16i8.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1224 ret <vscale x 16 x i8> %evec
1227 declare <vscale x 16 x i16> @llvm.experimental.constrained.fptosi.nxv16i16.nxv16f32(<vscale x 16 x float>, metadata)
1228 define <vscale x 16 x i16> @vfptosi_nxv16f32_nxv16i16(<vscale x 16 x float> %va) strictfp {
1229 ; CHECK-LABEL: vfptosi_nxv16f32_nxv16i16:
1231 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
1232 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
1233 ; CHECK-NEXT: vmv.v.v v8, v16
1235 %evec = call <vscale x 16 x i16> @llvm.experimental.constrained.fptosi.nxv16i16.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1236 ret <vscale x 16 x i16> %evec
1239 declare <vscale x 16 x i16> @llvm.experimental.constrained.fptoui.nxv16i16.nxv16f32(<vscale x 16 x float>, metadata)
1240 define <vscale x 16 x i16> @vfptoui_nxv16f32_nxv16i16(<vscale x 16 x float> %va) strictfp {
1241 ; CHECK-LABEL: vfptoui_nxv16f32_nxv16i16:
1243 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
1244 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
1245 ; CHECK-NEXT: vmv.v.v v8, v16
1247 %evec = call <vscale x 16 x i16> @llvm.experimental.constrained.fptoui.nxv16i16.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1248 ret <vscale x 16 x i16> %evec
1251 declare <vscale x 16 x i32> @llvm.experimental.constrained.fptosi.nxv16i32.nxv16f32(<vscale x 16 x float>, metadata)
1252 define <vscale x 16 x i32> @vfptosi_nxv16f32_nxv16i32(<vscale x 16 x float> %va) strictfp {
1253 ; CHECK-LABEL: vfptosi_nxv16f32_nxv16i32:
1255 ; CHECK-NEXT: vsetvli a0, zero, e32, m8, ta, ma
1256 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
1258 %evec = call <vscale x 16 x i32> @llvm.experimental.constrained.fptosi.nxv16i32.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1259 ret <vscale x 16 x i32> %evec
1262 declare <vscale x 16 x i32> @llvm.experimental.constrained.fptoui.nxv16i32.nxv16f32(<vscale x 16 x float>, metadata)
1263 define <vscale x 16 x i32> @vfptoui_nxv16f32_nxv16i32(<vscale x 16 x float> %va) strictfp {
1264 ; CHECK-LABEL: vfptoui_nxv16f32_nxv16i32:
1266 ; CHECK-NEXT: vsetvli a0, zero, e32, m8, ta, ma
1267 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
1269 %evec = call <vscale x 16 x i32> @llvm.experimental.constrained.fptoui.nxv16i32.nxv16f32(<vscale x 16 x float> %va, metadata !"fpexcept.strict")
1270 ret <vscale x 16 x i32> %evec
; Strict-FP fptosi/fptoui from <vscale x 1 x double>. From f64, i8 requires TWO
; vnsrl.wi truncation steps after the f64->i32 narrowing convert (i32 -> i16 ->
; i8, since each narrowing op halves the element width); i16 needs one; i32 is
; a single vfncvt; i64 is the same-width vfcvt.
1273 declare <vscale x 1 x i1> @llvm.experimental.constrained.fptosi.nxv1i1.nxv1f64(<vscale x 1 x double>, metadata)
1274 define <vscale x 1 x i1> @vfptosi_nxv1f64_nxv1i1(<vscale x 1 x double> %va) strictfp {
1275 ; CHECK-LABEL: vfptosi_nxv1f64_nxv1i1:
1277 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1278 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
1279 ; CHECK-NEXT: vand.vi v8, v9, 1
1280 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1282 %evec = call <vscale x 1 x i1> @llvm.experimental.constrained.fptosi.nxv1i1.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1283 ret <vscale x 1 x i1> %evec
1286 declare <vscale x 1 x i1> @llvm.experimental.constrained.fptoui.nxv1i1.nxv1f64(<vscale x 1 x double>, metadata)
1287 define <vscale x 1 x i1> @vfptoui_nxv1f64_nxv1i1(<vscale x 1 x double> %va) strictfp {
1288 ; CHECK-LABEL: vfptoui_nxv1f64_nxv1i1:
1290 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1291 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
1292 ; CHECK-NEXT: vand.vi v8, v9, 1
1293 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1295 %evec = call <vscale x 1 x i1> @llvm.experimental.constrained.fptoui.nxv1i1.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1296 ret <vscale x 1 x i1> %evec
1299 declare <vscale x 1 x i8> @llvm.experimental.constrained.fptosi.nxv1i8.nxv1f64(<vscale x 1 x double>, metadata)
1300 define <vscale x 1 x i8> @vfptosi_nxv1f64_nxv1i8(<vscale x 1 x double> %va) strictfp {
1301 ; CHECK-LABEL: vfptosi_nxv1f64_nxv1i8:
1303 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1304 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
1305 ; CHECK-NEXT: vsetvli zero, zero, e16, mf4, ta, ma
1306 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
1307 ; CHECK-NEXT: vsetvli zero, zero, e8, mf8, ta, ma
1308 ; CHECK-NEXT: vnsrl.wi v8, v8, 0
1310 %evec = call <vscale x 1 x i8> @llvm.experimental.constrained.fptosi.nxv1i8.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1311 ret <vscale x 1 x i8> %evec
1314 declare <vscale x 1 x i8> @llvm.experimental.constrained.fptoui.nxv1i8.nxv1f64(<vscale x 1 x double>, metadata)
1315 define <vscale x 1 x i8> @vfptoui_nxv1f64_nxv1i8(<vscale x 1 x double> %va) strictfp {
1316 ; CHECK-LABEL: vfptoui_nxv1f64_nxv1i8:
1318 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1319 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
1320 ; CHECK-NEXT: vsetvli zero, zero, e16, mf4, ta, ma
1321 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
1322 ; CHECK-NEXT: vsetvli zero, zero, e8, mf8, ta, ma
1323 ; CHECK-NEXT: vnsrl.wi v8, v8, 0
1325 %evec = call <vscale x 1 x i8> @llvm.experimental.constrained.fptoui.nxv1i8.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1326 ret <vscale x 1 x i8> %evec
1329 declare <vscale x 1 x i16> @llvm.experimental.constrained.fptosi.nxv1i16.nxv1f64(<vscale x 1 x double>, metadata)
1330 define <vscale x 1 x i16> @vfptosi_nxv1f64_nxv1i16(<vscale x 1 x double> %va) strictfp {
1331 ; CHECK-LABEL: vfptosi_nxv1f64_nxv1i16:
1333 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1334 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
1335 ; CHECK-NEXT: vsetvli zero, zero, e16, mf4, ta, ma
1336 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
1338 %evec = call <vscale x 1 x i16> @llvm.experimental.constrained.fptosi.nxv1i16.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1339 ret <vscale x 1 x i16> %evec
1342 declare <vscale x 1 x i16> @llvm.experimental.constrained.fptoui.nxv1i16.nxv1f64(<vscale x 1 x double>, metadata)
1343 define <vscale x 1 x i16> @vfptoui_nxv1f64_nxv1i16(<vscale x 1 x double> %va) strictfp {
1344 ; CHECK-LABEL: vfptoui_nxv1f64_nxv1i16:
1346 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1347 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
1348 ; CHECK-NEXT: vsetvli zero, zero, e16, mf4, ta, ma
1349 ; CHECK-NEXT: vnsrl.wi v8, v9, 0
1351 %evec = call <vscale x 1 x i16> @llvm.experimental.constrained.fptoui.nxv1i16.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1352 ret <vscale x 1 x i16> %evec
1355 declare <vscale x 1 x i32> @llvm.experimental.constrained.fptosi.nxv1i32.nxv1f64(<vscale x 1 x double>, metadata)
1356 define <vscale x 1 x i32> @vfptosi_nxv1f64_nxv1i32(<vscale x 1 x double> %va) strictfp {
1357 ; CHECK-LABEL: vfptosi_nxv1f64_nxv1i32:
1359 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1360 ; CHECK-NEXT: vfncvt.rtz.x.f.w v9, v8
1361 ; CHECK-NEXT: vmv1r.v v8, v9
1363 %evec = call <vscale x 1 x i32> @llvm.experimental.constrained.fptosi.nxv1i32.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1364 ret <vscale x 1 x i32> %evec
1367 declare <vscale x 1 x i32> @llvm.experimental.constrained.fptoui.nxv1i32.nxv1f64(<vscale x 1 x double>, metadata)
1368 define <vscale x 1 x i32> @vfptoui_nxv1f64_nxv1i32(<vscale x 1 x double> %va) strictfp {
1369 ; CHECK-LABEL: vfptoui_nxv1f64_nxv1i32:
1371 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
1372 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v9, v8
1373 ; CHECK-NEXT: vmv1r.v v8, v9
1375 %evec = call <vscale x 1 x i32> @llvm.experimental.constrained.fptoui.nxv1i32.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1376 ret <vscale x 1 x i32> %evec
1379 declare <vscale x 1 x i64> @llvm.experimental.constrained.fptosi.nxv1i64.nxv1f64(<vscale x 1 x double>, metadata)
1380 define <vscale x 1 x i64> @vfptosi_nxv1f64_nxv1i64(<vscale x 1 x double> %va) strictfp {
1381 ; CHECK-LABEL: vfptosi_nxv1f64_nxv1i64:
1383 ; CHECK-NEXT: vsetvli a0, zero, e64, m1, ta, ma
1384 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
1386 %evec = call <vscale x 1 x i64> @llvm.experimental.constrained.fptosi.nxv1i64.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1387 ret <vscale x 1 x i64> %evec
1390 declare <vscale x 1 x i64> @llvm.experimental.constrained.fptoui.nxv1i64.nxv1f64(<vscale x 1 x double>, metadata)
1391 define <vscale x 1 x i64> @vfptoui_nxv1f64_nxv1i64(<vscale x 1 x double> %va) strictfp {
1392 ; CHECK-LABEL: vfptoui_nxv1f64_nxv1i64:
1394 ; CHECK-NEXT: vsetvli a0, zero, e64, m1, ta, ma
1395 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
1397 %evec = call <vscale x 1 x i64> @llvm.experimental.constrained.fptoui.nxv1i64.nxv1f64(<vscale x 1 x double> %va, metadata !"fpexcept.strict")
1398 ret <vscale x 1 x i64> %evec
; --- Strict-FP fptosi/fptoui: <vscale x 2 x double> source ---
; CHECK lines are autogenerated (update_llc_test_checks.py); do not hand-edit.
; f64 -> i1: rtz-narrow to e32, then reduce to a mask with vand.vi (keep LSB)
; and vmsne.vi (set mask bit where LSB != 0), result in v0.
1401 declare <vscale x 2 x i1> @llvm.experimental.constrained.fptosi.nxv2i1.nxv2f64(<vscale x 2 x double>, metadata)
1402 define <vscale x 2 x i1> @vfptosi_nxv2f64_nxv2i1(<vscale x 2 x double> %va) strictfp {
1403 ; CHECK-LABEL: vfptosi_nxv2f64_nxv2i1:
1405 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1406 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
1407 ; CHECK-NEXT: vand.vi v8, v10, 1
1408 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1410 %evec = call <vscale x 2 x i1> @llvm.experimental.constrained.fptosi.nxv2i1.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1411 ret <vscale x 2 x i1> %evec
1414 declare <vscale x 2 x i1> @llvm.experimental.constrained.fptoui.nxv2i1.nxv2f64(<vscale x 2 x double>, metadata)
1415 define <vscale x 2 x i1> @vfptoui_nxv2f64_nxv2i1(<vscale x 2 x double> %va) strictfp {
1416 ; CHECK-LABEL: vfptoui_nxv2f64_nxv2i1:
1418 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1419 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
1420 ; CHECK-NEXT: vand.vi v8, v10, 1
1421 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1423 %evec = call <vscale x 2 x i1> @llvm.experimental.constrained.fptoui.nxv2i1.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1424 ret <vscale x 2 x i1> %evec
; f64 -> i8: rtz-narrow to e32, then two vnsrl.wi truncation steps (e16, e8).
1427 declare <vscale x 2 x i8> @llvm.experimental.constrained.fptosi.nxv2i8.nxv2f64(<vscale x 2 x double>, metadata)
1428 define <vscale x 2 x i8> @vfptosi_nxv2f64_nxv2i8(<vscale x 2 x double> %va) strictfp {
1429 ; CHECK-LABEL: vfptosi_nxv2f64_nxv2i8:
1431 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1432 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
1433 ; CHECK-NEXT: vsetvli zero, zero, e16, mf2, ta, ma
1434 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
1435 ; CHECK-NEXT: vsetvli zero, zero, e8, mf4, ta, ma
1436 ; CHECK-NEXT: vnsrl.wi v8, v8, 0
1438 %evec = call <vscale x 2 x i8> @llvm.experimental.constrained.fptosi.nxv2i8.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1439 ret <vscale x 2 x i8> %evec
1442 declare <vscale x 2 x i8> @llvm.experimental.constrained.fptoui.nxv2i8.nxv2f64(<vscale x 2 x double>, metadata)
1443 define <vscale x 2 x i8> @vfptoui_nxv2f64_nxv2i8(<vscale x 2 x double> %va) strictfp {
1444 ; CHECK-LABEL: vfptoui_nxv2f64_nxv2i8:
1446 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1447 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
1448 ; CHECK-NEXT: vsetvli zero, zero, e16, mf2, ta, ma
1449 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
1450 ; CHECK-NEXT: vsetvli zero, zero, e8, mf4, ta, ma
1451 ; CHECK-NEXT: vnsrl.wi v8, v8, 0
1453 %evec = call <vscale x 2 x i8> @llvm.experimental.constrained.fptoui.nxv2i8.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1454 ret <vscale x 2 x i8> %evec
; f64 -> i16: rtz-narrow to e32, one vnsrl.wi truncation to e16.
1457 declare <vscale x 2 x i16> @llvm.experimental.constrained.fptosi.nxv2i16.nxv2f64(<vscale x 2 x double>, metadata)
1458 define <vscale x 2 x i16> @vfptosi_nxv2f64_nxv2i16(<vscale x 2 x double> %va) strictfp {
1459 ; CHECK-LABEL: vfptosi_nxv2f64_nxv2i16:
1461 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1462 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
1463 ; CHECK-NEXT: vsetvli zero, zero, e16, mf2, ta, ma
1464 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
1466 %evec = call <vscale x 2 x i16> @llvm.experimental.constrained.fptosi.nxv2i16.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1467 ret <vscale x 2 x i16> %evec
1470 declare <vscale x 2 x i16> @llvm.experimental.constrained.fptoui.nxv2i16.nxv2f64(<vscale x 2 x double>, metadata)
1471 define <vscale x 2 x i16> @vfptoui_nxv2f64_nxv2i16(<vscale x 2 x double> %va) strictfp {
1472 ; CHECK-LABEL: vfptoui_nxv2f64_nxv2i16:
1474 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1475 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
1476 ; CHECK-NEXT: vsetvli zero, zero, e16, mf2, ta, ma
1477 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
1479 %evec = call <vscale x 2 x i16> @llvm.experimental.constrained.fptoui.nxv2i16.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1480 ret <vscale x 2 x i16> %evec
; f64 -> i32: single rtz narrowing convert; vmv.v.v copies result into v8.
1483 declare <vscale x 2 x i32> @llvm.experimental.constrained.fptosi.nxv2i32.nxv2f64(<vscale x 2 x double>, metadata)
1484 define <vscale x 2 x i32> @vfptosi_nxv2f64_nxv2i32(<vscale x 2 x double> %va) strictfp {
1485 ; CHECK-LABEL: vfptosi_nxv2f64_nxv2i32:
1487 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1488 ; CHECK-NEXT: vfncvt.rtz.x.f.w v10, v8
1489 ; CHECK-NEXT: vmv.v.v v8, v10
1491 %evec = call <vscale x 2 x i32> @llvm.experimental.constrained.fptosi.nxv2i32.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1492 ret <vscale x 2 x i32> %evec
1495 declare <vscale x 2 x i32> @llvm.experimental.constrained.fptoui.nxv2i32.nxv2f64(<vscale x 2 x double>, metadata)
1496 define <vscale x 2 x i32> @vfptoui_nxv2f64_nxv2i32(<vscale x 2 x double> %va) strictfp {
1497 ; CHECK-LABEL: vfptoui_nxv2f64_nxv2i32:
1499 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
1500 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v10, v8
1501 ; CHECK-NEXT: vmv.v.v v8, v10
1503 %evec = call <vscale x 2 x i32> @llvm.experimental.constrained.fptoui.nxv2i32.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1504 ret <vscale x 2 x i32> %evec
; f64 -> i64: same element width, single in-place vfcvt.rtz at e64/m2.
1507 declare <vscale x 2 x i64> @llvm.experimental.constrained.fptosi.nxv2i64.nxv2f64(<vscale x 2 x double>, metadata)
1508 define <vscale x 2 x i64> @vfptosi_nxv2f64_nxv2i64(<vscale x 2 x double> %va) strictfp {
1509 ; CHECK-LABEL: vfptosi_nxv2f64_nxv2i64:
1511 ; CHECK-NEXT: vsetvli a0, zero, e64, m2, ta, ma
1512 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
1514 %evec = call <vscale x 2 x i64> @llvm.experimental.constrained.fptosi.nxv2i64.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1515 ret <vscale x 2 x i64> %evec
1518 declare <vscale x 2 x i64> @llvm.experimental.constrained.fptoui.nxv2i64.nxv2f64(<vscale x 2 x double>, metadata)
1519 define <vscale x 2 x i64> @vfptoui_nxv2f64_nxv2i64(<vscale x 2 x double> %va) strictfp {
1520 ; CHECK-LABEL: vfptoui_nxv2f64_nxv2i64:
1522 ; CHECK-NEXT: vsetvli a0, zero, e64, m2, ta, ma
1523 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
1525 %evec = call <vscale x 2 x i64> @llvm.experimental.constrained.fptoui.nxv2i64.nxv2f64(<vscale x 2 x double> %va, metadata !"fpexcept.strict")
1526 ret <vscale x 2 x i64> %evec
; --- Strict-FP fptosi/fptoui: <vscale x 4 x double> source ---
; Same lowering patterns as the nxv2 cases above, at the next register-group
; size (e32/m2 narrow step, v12 as the intermediate). CHECK lines are
; autogenerated (update_llc_test_checks.py); do not hand-edit.
; f64 -> i1: narrow with rtz, then vand.vi/vmsne.vi to form the mask in v0.
1529 declare <vscale x 4 x i1> @llvm.experimental.constrained.fptosi.nxv4i1.nxv4f64(<vscale x 4 x double>, metadata)
1530 define <vscale x 4 x i1> @vfptosi_nxv4f64_nxv4i1(<vscale x 4 x double> %va) strictfp {
1531 ; CHECK-LABEL: vfptosi_nxv4f64_nxv4i1:
1533 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1534 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
1535 ; CHECK-NEXT: vand.vi v8, v12, 1
1536 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1538 %evec = call <vscale x 4 x i1> @llvm.experimental.constrained.fptosi.nxv4i1.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1539 ret <vscale x 4 x i1> %evec
1542 declare <vscale x 4 x i1> @llvm.experimental.constrained.fptoui.nxv4i1.nxv4f64(<vscale x 4 x double>, metadata)
1543 define <vscale x 4 x i1> @vfptoui_nxv4f64_nxv4i1(<vscale x 4 x double> %va) strictfp {
1544 ; CHECK-LABEL: vfptoui_nxv4f64_nxv4i1:
1546 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1547 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
1548 ; CHECK-NEXT: vand.vi v8, v12, 1
1549 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1551 %evec = call <vscale x 4 x i1> @llvm.experimental.constrained.fptoui.nxv4i1.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1552 ret <vscale x 4 x i1> %evec
; f64 -> i8: rtz-narrow to e32, then two vnsrl.wi truncation steps (e16, e8).
1555 declare <vscale x 4 x i8> @llvm.experimental.constrained.fptosi.nxv4i8.nxv4f64(<vscale x 4 x double>, metadata)
1556 define <vscale x 4 x i8> @vfptosi_nxv4f64_nxv4i8(<vscale x 4 x double> %va) strictfp {
1557 ; CHECK-LABEL: vfptosi_nxv4f64_nxv4i8:
1559 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1560 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
1561 ; CHECK-NEXT: vsetvli zero, zero, e16, m1, ta, ma
1562 ; CHECK-NEXT: vnsrl.wi v8, v12, 0
1563 ; CHECK-NEXT: vsetvli zero, zero, e8, mf2, ta, ma
1564 ; CHECK-NEXT: vnsrl.wi v8, v8, 0
1566 %evec = call <vscale x 4 x i8> @llvm.experimental.constrained.fptosi.nxv4i8.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1567 ret <vscale x 4 x i8> %evec
1570 declare <vscale x 4 x i8> @llvm.experimental.constrained.fptoui.nxv4i8.nxv4f64(<vscale x 4 x double>, metadata)
1571 define <vscale x 4 x i8> @vfptoui_nxv4f64_nxv4i8(<vscale x 4 x double> %va) strictfp {
1572 ; CHECK-LABEL: vfptoui_nxv4f64_nxv4i8:
1574 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1575 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
1576 ; CHECK-NEXT: vsetvli zero, zero, e16, m1, ta, ma
1577 ; CHECK-NEXT: vnsrl.wi v8, v12, 0
1578 ; CHECK-NEXT: vsetvli zero, zero, e8, mf2, ta, ma
1579 ; CHECK-NEXT: vnsrl.wi v8, v8, 0
1581 %evec = call <vscale x 4 x i8> @llvm.experimental.constrained.fptoui.nxv4i8.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1582 ret <vscale x 4 x i8> %evec
; f64 -> i16: rtz-narrow to e32, one vnsrl.wi truncation to e16.
1585 declare <vscale x 4 x i16> @llvm.experimental.constrained.fptosi.nxv4i16.nxv4f64(<vscale x 4 x double>, metadata)
1586 define <vscale x 4 x i16> @vfptosi_nxv4f64_nxv4i16(<vscale x 4 x double> %va) strictfp {
1587 ; CHECK-LABEL: vfptosi_nxv4f64_nxv4i16:
1589 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1590 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
1591 ; CHECK-NEXT: vsetvli zero, zero, e16, m1, ta, ma
1592 ; CHECK-NEXT: vnsrl.wi v8, v12, 0
1594 %evec = call <vscale x 4 x i16> @llvm.experimental.constrained.fptosi.nxv4i16.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1595 ret <vscale x 4 x i16> %evec
1598 declare <vscale x 4 x i16> @llvm.experimental.constrained.fptoui.nxv4i16.nxv4f64(<vscale x 4 x double>, metadata)
1599 define <vscale x 4 x i16> @vfptoui_nxv4f64_nxv4i16(<vscale x 4 x double> %va) strictfp {
1600 ; CHECK-LABEL: vfptoui_nxv4f64_nxv4i16:
1602 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1603 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
1604 ; CHECK-NEXT: vsetvli zero, zero, e16, m1, ta, ma
1605 ; CHECK-NEXT: vnsrl.wi v8, v12, 0
1607 %evec = call <vscale x 4 x i16> @llvm.experimental.constrained.fptoui.nxv4i16.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1608 ret <vscale x 4 x i16> %evec
; f64 -> i32: single rtz narrowing convert; vmv.v.v copies result into v8.
1611 declare <vscale x 4 x i32> @llvm.experimental.constrained.fptosi.nxv4i32.nxv4f64(<vscale x 4 x double>, metadata)
1612 define <vscale x 4 x i32> @vfptosi_nxv4f64_nxv4i32(<vscale x 4 x double> %va) strictfp {
1613 ; CHECK-LABEL: vfptosi_nxv4f64_nxv4i32:
1615 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1616 ; CHECK-NEXT: vfncvt.rtz.x.f.w v12, v8
1617 ; CHECK-NEXT: vmv.v.v v8, v12
1619 %evec = call <vscale x 4 x i32> @llvm.experimental.constrained.fptosi.nxv4i32.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1620 ret <vscale x 4 x i32> %evec
1623 declare <vscale x 4 x i32> @llvm.experimental.constrained.fptoui.nxv4i32.nxv4f64(<vscale x 4 x double>, metadata)
1624 define <vscale x 4 x i32> @vfptoui_nxv4f64_nxv4i32(<vscale x 4 x double> %va) strictfp {
1625 ; CHECK-LABEL: vfptoui_nxv4f64_nxv4i32:
1627 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
1628 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v12, v8
1629 ; CHECK-NEXT: vmv.v.v v8, v12
1631 %evec = call <vscale x 4 x i32> @llvm.experimental.constrained.fptoui.nxv4i32.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1632 ret <vscale x 4 x i32> %evec
; f64 -> i64: same element width, single in-place vfcvt.rtz at e64/m4.
1635 declare <vscale x 4 x i64> @llvm.experimental.constrained.fptosi.nxv4i64.nxv4f64(<vscale x 4 x double>, metadata)
1636 define <vscale x 4 x i64> @vfptosi_nxv4f64_nxv4i64(<vscale x 4 x double> %va) strictfp {
1637 ; CHECK-LABEL: vfptosi_nxv4f64_nxv4i64:
1639 ; CHECK-NEXT: vsetvli a0, zero, e64, m4, ta, ma
1640 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
1642 %evec = call <vscale x 4 x i64> @llvm.experimental.constrained.fptosi.nxv4i64.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1643 ret <vscale x 4 x i64> %evec
1646 declare <vscale x 4 x i64> @llvm.experimental.constrained.fptoui.nxv4i64.nxv4f64(<vscale x 4 x double>, metadata)
1647 define <vscale x 4 x i64> @vfptoui_nxv4f64_nxv4i64(<vscale x 4 x double> %va) strictfp {
1648 ; CHECK-LABEL: vfptoui_nxv4f64_nxv4i64:
1650 ; CHECK-NEXT: vsetvli a0, zero, e64, m4, ta, ma
1651 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
1653 %evec = call <vscale x 4 x i64> @llvm.experimental.constrained.fptoui.nxv4i64.nxv4f64(<vscale x 4 x double> %va, metadata !"fpexcept.strict")
1654 ret <vscale x 4 x i64> %evec
; --- Strict-FP fptosi/fptoui: <vscale x 8 x double> source ---
; Largest register-group variants (e32/m4 narrow step, v16 intermediate).
; CHECK lines are autogenerated (update_llc_test_checks.py); do not hand-edit.
; f64 -> i1: narrow with rtz, then vand.vi/vmsne.vi to form the mask in v0.
1657 declare <vscale x 8 x i1> @llvm.experimental.constrained.fptosi.nxv8i1.nxv8f64(<vscale x 8 x double>, metadata)
1658 define <vscale x 8 x i1> @vfptosi_nxv8f64_nxv8i1(<vscale x 8 x double> %va) strictfp {
1659 ; CHECK-LABEL: vfptosi_nxv8f64_nxv8i1:
1661 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1662 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
1663 ; CHECK-NEXT: vand.vi v8, v16, 1
1664 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1666 %evec = call <vscale x 8 x i1> @llvm.experimental.constrained.fptosi.nxv8i1.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1667 ret <vscale x 8 x i1> %evec
1670 declare <vscale x 8 x i1> @llvm.experimental.constrained.fptoui.nxv8i1.nxv8f64(<vscale x 8 x double>, metadata)
1671 define <vscale x 8 x i1> @vfptoui_nxv8f64_nxv8i1(<vscale x 8 x double> %va) strictfp {
1672 ; CHECK-LABEL: vfptoui_nxv8f64_nxv8i1:
1674 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1675 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
1676 ; CHECK-NEXT: vand.vi v8, v16, 1
1677 ; CHECK-NEXT: vmsne.vi v0, v8, 0
1679 %evec = call <vscale x 8 x i1> @llvm.experimental.constrained.fptoui.nxv8i1.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1680 ret <vscale x 8 x i1> %evec
; f64 -> i8: rtz-narrow to e32, then two vnsrl.wi truncation steps (e16, e8),
; with v10 as the intermediate of the first truncation.
1683 declare <vscale x 8 x i8> @llvm.experimental.constrained.fptosi.nxv8i8.nxv8f64(<vscale x 8 x double>, metadata)
1684 define <vscale x 8 x i8> @vfptosi_nxv8f64_nxv8i8(<vscale x 8 x double> %va) strictfp {
1685 ; CHECK-LABEL: vfptosi_nxv8f64_nxv8i8:
1687 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1688 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
1689 ; CHECK-NEXT: vsetvli zero, zero, e16, m2, ta, ma
1690 ; CHECK-NEXT: vnsrl.wi v10, v16, 0
1691 ; CHECK-NEXT: vsetvli zero, zero, e8, m1, ta, ma
1692 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
1694 %evec = call <vscale x 8 x i8> @llvm.experimental.constrained.fptosi.nxv8i8.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1695 ret <vscale x 8 x i8> %evec
1698 declare <vscale x 8 x i8> @llvm.experimental.constrained.fptoui.nxv8i8.nxv8f64(<vscale x 8 x double>, metadata)
1699 define <vscale x 8 x i8> @vfptoui_nxv8f64_nxv8i8(<vscale x 8 x double> %va) strictfp {
1700 ; CHECK-LABEL: vfptoui_nxv8f64_nxv8i8:
1702 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1703 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
1704 ; CHECK-NEXT: vsetvli zero, zero, e16, m2, ta, ma
1705 ; CHECK-NEXT: vnsrl.wi v10, v16, 0
1706 ; CHECK-NEXT: vsetvli zero, zero, e8, m1, ta, ma
1707 ; CHECK-NEXT: vnsrl.wi v8, v10, 0
1709 %evec = call <vscale x 8 x i8> @llvm.experimental.constrained.fptoui.nxv8i8.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1710 ret <vscale x 8 x i8> %evec
; f64 -> i16: rtz-narrow to e32, one vnsrl.wi truncation to e16.
1713 declare <vscale x 8 x i16> @llvm.experimental.constrained.fptosi.nxv8i16.nxv8f64(<vscale x 8 x double>, metadata)
1714 define <vscale x 8 x i16> @vfptosi_nxv8f64_nxv8i16(<vscale x 8 x double> %va) strictfp {
1715 ; CHECK-LABEL: vfptosi_nxv8f64_nxv8i16:
1717 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1718 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
1719 ; CHECK-NEXT: vsetvli zero, zero, e16, m2, ta, ma
1720 ; CHECK-NEXT: vnsrl.wi v8, v16, 0
1722 %evec = call <vscale x 8 x i16> @llvm.experimental.constrained.fptosi.nxv8i16.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1723 ret <vscale x 8 x i16> %evec
1726 declare <vscale x 8 x i16> @llvm.experimental.constrained.fptoui.nxv8i16.nxv8f64(<vscale x 8 x double>, metadata)
1727 define <vscale x 8 x i16> @vfptoui_nxv8f64_nxv8i16(<vscale x 8 x double> %va) strictfp {
1728 ; CHECK-LABEL: vfptoui_nxv8f64_nxv8i16:
1730 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1731 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
1732 ; CHECK-NEXT: vsetvli zero, zero, e16, m2, ta, ma
1733 ; CHECK-NEXT: vnsrl.wi v8, v16, 0
1735 %evec = call <vscale x 8 x i16> @llvm.experimental.constrained.fptoui.nxv8i16.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1736 ret <vscale x 8 x i16> %evec
; f64 -> i32: single rtz narrowing convert; vmv.v.v copies result into v8.
1739 declare <vscale x 8 x i32> @llvm.experimental.constrained.fptosi.nxv8i32.nxv8f64(<vscale x 8 x double>, metadata)
1740 define <vscale x 8 x i32> @vfptosi_nxv8f64_nxv8i32(<vscale x 8 x double> %va) strictfp {
1741 ; CHECK-LABEL: vfptosi_nxv8f64_nxv8i32:
1743 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1744 ; CHECK-NEXT: vfncvt.rtz.x.f.w v16, v8
1745 ; CHECK-NEXT: vmv.v.v v8, v16
1747 %evec = call <vscale x 8 x i32> @llvm.experimental.constrained.fptosi.nxv8i32.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1748 ret <vscale x 8 x i32> %evec
1751 declare <vscale x 8 x i32> @llvm.experimental.constrained.fptoui.nxv8i32.nxv8f64(<vscale x 8 x double>, metadata)
1752 define <vscale x 8 x i32> @vfptoui_nxv8f64_nxv8i32(<vscale x 8 x double> %va) strictfp {
1753 ; CHECK-LABEL: vfptoui_nxv8f64_nxv8i32:
1755 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
1756 ; CHECK-NEXT: vfncvt.rtz.xu.f.w v16, v8
1757 ; CHECK-NEXT: vmv.v.v v8, v16
1759 %evec = call <vscale x 8 x i32> @llvm.experimental.constrained.fptoui.nxv8i32.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1760 ret <vscale x 8 x i32> %evec
; f64 -> i64: same element width, single in-place vfcvt.rtz at e64/m8.
1763 declare <vscale x 8 x i64> @llvm.experimental.constrained.fptosi.nxv8i64.nxv8f64(<vscale x 8 x double>, metadata)
1764 define <vscale x 8 x i64> @vfptosi_nxv8f64_nxv8i64(<vscale x 8 x double> %va) strictfp {
1765 ; CHECK-LABEL: vfptosi_nxv8f64_nxv8i64:
1767 ; CHECK-NEXT: vsetvli a0, zero, e64, m8, ta, ma
1768 ; CHECK-NEXT: vfcvt.rtz.x.f.v v8, v8
1770 %evec = call <vscale x 8 x i64> @llvm.experimental.constrained.fptosi.nxv8i64.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1771 ret <vscale x 8 x i64> %evec
1774 declare <vscale x 8 x i64> @llvm.experimental.constrained.fptoui.nxv8i64.nxv8f64(<vscale x 8 x double>, metadata)
1775 define <vscale x 8 x i64> @vfptoui_nxv8f64_nxv8i64(<vscale x 8 x double> %va) strictfp {
1776 ; CHECK-LABEL: vfptoui_nxv8f64_nxv8i64:
1778 ; CHECK-NEXT: vsetvli a0, zero, e64, m8, ta, ma
1779 ; CHECK-NEXT: vfcvt.rtz.xu.f.v v8, v8
1781 %evec = call <vscale x 8 x i64> @llvm.experimental.constrained.fptoui.nxv8i64.nxv8f64(<vscale x 8 x double> %va, metadata !"fpexcept.strict")
1782 ret <vscale x 8 x i64> %evec