// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \
// RUN:   -target-feature +zvfh -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
10 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vreinterpret_v_i8mf8_u8mf8
11 // CHECK-RV64-SAME: (<vscale x 1 x i8> [[SRC:%.*]]) #[[ATTR0:[0-9]+]] {
12 // CHECK-RV64-NEXT: entry:
13 // CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[SRC]]
15 vuint8mf8_t
test_vreinterpret_v_i8mf8_u8mf8(vint8mf8_t src
) {
16 return __riscv_vreinterpret_u8mf8(src
);
19 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vreinterpret_v_i8mf4_u8mf4
20 // CHECK-RV64-SAME: (<vscale x 2 x i8> [[SRC:%.*]]) #[[ATTR0]] {
21 // CHECK-RV64-NEXT: entry:
22 // CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[SRC]]
24 vuint8mf4_t
test_vreinterpret_v_i8mf4_u8mf4(vint8mf4_t src
) {
25 return __riscv_vreinterpret_u8mf4(src
);
28 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vreinterpret_v_i8mf2_u8mf2
29 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[SRC:%.*]]) #[[ATTR0]] {
30 // CHECK-RV64-NEXT: entry:
31 // CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[SRC]]
33 vuint8mf2_t
test_vreinterpret_v_i8mf2_u8mf2(vint8mf2_t src
) {
34 return __riscv_vreinterpret_u8mf2(src
);
37 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_i8m1_u8m1
38 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
39 // CHECK-RV64-NEXT: entry:
40 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[SRC]]
42 vuint8m1_t
test_vreinterpret_v_i8m1_u8m1(vint8m1_t src
) {
43 return __riscv_vreinterpret_u8m1(src
);
46 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_i8m2_u8m2
47 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
48 // CHECK-RV64-NEXT: entry:
49 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[SRC]]
51 vuint8m2_t
test_vreinterpret_v_i8m2_u8m2(vint8m2_t src
) {
52 return __riscv_vreinterpret_u8m2(src
);
55 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_i8m4_u8m4
56 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
57 // CHECK-RV64-NEXT: entry:
58 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[SRC]]
60 vuint8m4_t
test_vreinterpret_v_i8m4_u8m4(vint8m4_t src
) {
61 return __riscv_vreinterpret_u8m4(src
);
64 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_i8m8_u8m8
65 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
66 // CHECK-RV64-NEXT: entry:
67 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[SRC]]
69 vuint8m8_t
test_vreinterpret_v_i8m8_u8m8(vint8m8_t src
) {
70 return __riscv_vreinterpret_u8m8(src
);
73 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vreinterpret_v_u8mf8_i8mf8
74 // CHECK-RV64-SAME: (<vscale x 1 x i8> [[SRC:%.*]]) #[[ATTR0]] {
75 // CHECK-RV64-NEXT: entry:
76 // CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[SRC]]
78 vint8mf8_t
test_vreinterpret_v_u8mf8_i8mf8(vuint8mf8_t src
) {
79 return __riscv_vreinterpret_i8mf8(src
);
82 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vreinterpret_v_u8mf4_i8mf4
83 // CHECK-RV64-SAME: (<vscale x 2 x i8> [[SRC:%.*]]) #[[ATTR0]] {
84 // CHECK-RV64-NEXT: entry:
85 // CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[SRC]]
87 vint8mf4_t
test_vreinterpret_v_u8mf4_i8mf4(vuint8mf4_t src
) {
88 return __riscv_vreinterpret_i8mf4(src
);
91 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vreinterpret_v_u8mf2_i8mf2
92 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[SRC:%.*]]) #[[ATTR0]] {
93 // CHECK-RV64-NEXT: entry:
94 // CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[SRC]]
96 vint8mf2_t
test_vreinterpret_v_u8mf2_i8mf2(vuint8mf2_t src
) {
97 return __riscv_vreinterpret_i8mf2(src
);
100 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_u8m1_i8m1
101 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
102 // CHECK-RV64-NEXT: entry:
103 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[SRC]]
105 vint8m1_t
test_vreinterpret_v_u8m1_i8m1(vuint8m1_t src
) {
106 return __riscv_vreinterpret_i8m1(src
);
109 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_u8m2_i8m2
110 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
111 // CHECK-RV64-NEXT: entry:
112 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[SRC]]
114 vint8m2_t
test_vreinterpret_v_u8m2_i8m2(vuint8m2_t src
) {
115 return __riscv_vreinterpret_i8m2(src
);
118 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_u8m4_i8m4
119 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
120 // CHECK-RV64-NEXT: entry:
121 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[SRC]]
123 vint8m4_t
test_vreinterpret_v_u8m4_i8m4(vuint8m4_t src
) {
124 return __riscv_vreinterpret_i8m4(src
);
127 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_u8m8_i8m8
128 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
129 // CHECK-RV64-NEXT: entry:
130 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[SRC]]
132 vint8m8_t
test_vreinterpret_v_u8m8_i8m8(vuint8m8_t src
) {
133 return __riscv_vreinterpret_i8m8(src
);
136 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x half> @test_vreinterpret_v_i16mf4_f16mf4
137 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[SRC:%.*]]) #[[ATTR0]] {
138 // CHECK-RV64-NEXT: entry:
139 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC]] to <vscale x 1 x half>
140 // CHECK-RV64-NEXT: ret <vscale x 1 x half> [[TMP0]]
142 vfloat16mf4_t
test_vreinterpret_v_i16mf4_f16mf4(vint16mf4_t src
) {
143 return __riscv_vreinterpret_f16mf4(src
);
146 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x half> @test_vreinterpret_v_i16mf2_f16mf2
147 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
148 // CHECK-RV64-NEXT: entry:
149 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC]] to <vscale x 2 x half>
150 // CHECK-RV64-NEXT: ret <vscale x 2 x half> [[TMP0]]
152 vfloat16mf2_t
test_vreinterpret_v_i16mf2_f16mf2(vint16mf2_t src
) {
153 return __riscv_vreinterpret_f16mf2(src
);
156 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x half> @test_vreinterpret_v_i16m1_f16m1
157 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
158 // CHECK-RV64-NEXT: entry:
159 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 4 x half>
160 // CHECK-RV64-NEXT: ret <vscale x 4 x half> [[TMP0]]
162 vfloat16m1_t
test_vreinterpret_v_i16m1_f16m1(vint16m1_t src
) {
163 return __riscv_vreinterpret_f16m1(src
);
166 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x half> @test_vreinterpret_v_i16m2_f16m2
167 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
168 // CHECK-RV64-NEXT: entry:
169 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 8 x half>
170 // CHECK-RV64-NEXT: ret <vscale x 8 x half> [[TMP0]]
172 vfloat16m2_t
test_vreinterpret_v_i16m2_f16m2(vint16m2_t src
) {
173 return __riscv_vreinterpret_f16m2(src
);
176 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x half> @test_vreinterpret_v_i16m4_f16m4
177 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
178 // CHECK-RV64-NEXT: entry:
179 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 16 x half>
180 // CHECK-RV64-NEXT: ret <vscale x 16 x half> [[TMP0]]
182 vfloat16m4_t
test_vreinterpret_v_i16m4_f16m4(vint16m4_t src
) {
183 return __riscv_vreinterpret_f16m4(src
);
186 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x half> @test_vreinterpret_v_i16m8_f16m8
187 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
188 // CHECK-RV64-NEXT: entry:
189 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 32 x half>
190 // CHECK-RV64-NEXT: ret <vscale x 32 x half> [[TMP0]]
192 vfloat16m8_t
test_vreinterpret_v_i16m8_f16m8(vint16m8_t src
) {
193 return __riscv_vreinterpret_f16m8(src
);
196 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x half> @test_vreinterpret_v_u16mf4_f16mf4
197 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[SRC:%.*]]) #[[ATTR0]] {
198 // CHECK-RV64-NEXT: entry:
199 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC]] to <vscale x 1 x half>
200 // CHECK-RV64-NEXT: ret <vscale x 1 x half> [[TMP0]]
202 vfloat16mf4_t
test_vreinterpret_v_u16mf4_f16mf4(vuint16mf4_t src
) {
203 return __riscv_vreinterpret_f16mf4(src
);
206 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x half> @test_vreinterpret_v_u16mf2_f16mf2
207 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
208 // CHECK-RV64-NEXT: entry:
209 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC]] to <vscale x 2 x half>
210 // CHECK-RV64-NEXT: ret <vscale x 2 x half> [[TMP0]]
212 vfloat16mf2_t
test_vreinterpret_v_u16mf2_f16mf2(vuint16mf2_t src
) {
213 return __riscv_vreinterpret_f16mf2(src
);
216 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x half> @test_vreinterpret_v_u16m1_f16m1
217 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
218 // CHECK-RV64-NEXT: entry:
219 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 4 x half>
220 // CHECK-RV64-NEXT: ret <vscale x 4 x half> [[TMP0]]
222 vfloat16m1_t
test_vreinterpret_v_u16m1_f16m1(vuint16m1_t src
) {
223 return __riscv_vreinterpret_f16m1(src
);
226 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x half> @test_vreinterpret_v_u16m2_f16m2
227 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
228 // CHECK-RV64-NEXT: entry:
229 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 8 x half>
230 // CHECK-RV64-NEXT: ret <vscale x 8 x half> [[TMP0]]
232 vfloat16m2_t
test_vreinterpret_v_u16m2_f16m2(vuint16m2_t src
) {
233 return __riscv_vreinterpret_f16m2(src
);
236 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x half> @test_vreinterpret_v_u16m4_f16m4
237 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
238 // CHECK-RV64-NEXT: entry:
239 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 16 x half>
240 // CHECK-RV64-NEXT: ret <vscale x 16 x half> [[TMP0]]
242 vfloat16m4_t
test_vreinterpret_v_u16m4_f16m4(vuint16m4_t src
) {
243 return __riscv_vreinterpret_f16m4(src
);
246 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x half> @test_vreinterpret_v_u16m8_f16m8
247 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
248 // CHECK-RV64-NEXT: entry:
249 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 32 x half>
250 // CHECK-RV64-NEXT: ret <vscale x 32 x half> [[TMP0]]
252 vfloat16m8_t
test_vreinterpret_v_u16m8_f16m8(vuint16m8_t src
) {
253 return __riscv_vreinterpret_f16m8(src
);
256 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vreinterpret_v_i16mf4_u16mf4
257 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[SRC:%.*]]) #[[ATTR0]] {
258 // CHECK-RV64-NEXT: entry:
259 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[SRC]]
261 vuint16mf4_t
test_vreinterpret_v_i16mf4_u16mf4(vint16mf4_t src
) {
262 return __riscv_vreinterpret_u16mf4(src
);
265 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_i16mf2_u16mf2
266 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
267 // CHECK-RV64-NEXT: entry:
268 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[SRC]]
270 vuint16mf2_t
test_vreinterpret_v_i16mf2_u16mf2(vint16mf2_t src
) {
271 return __riscv_vreinterpret_u16mf2(src
);
274 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_i16m1_u16m1
275 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
276 // CHECK-RV64-NEXT: entry:
277 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[SRC]]
279 vuint16m1_t
test_vreinterpret_v_i16m1_u16m1(vint16m1_t src
) {
280 return __riscv_vreinterpret_u16m1(src
);
283 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_i16m2_u16m2
284 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
285 // CHECK-RV64-NEXT: entry:
286 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[SRC]]
288 vuint16m2_t
test_vreinterpret_v_i16m2_u16m2(vint16m2_t src
) {
289 return __riscv_vreinterpret_u16m2(src
);
292 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_i16m4_u16m4
293 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
294 // CHECK-RV64-NEXT: entry:
295 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[SRC]]
297 vuint16m4_t
test_vreinterpret_v_i16m4_u16m4(vint16m4_t src
) {
298 return __riscv_vreinterpret_u16m4(src
);
301 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_i16m8_u16m8
302 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
303 // CHECK-RV64-NEXT: entry:
304 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[SRC]]
306 vuint16m8_t
test_vreinterpret_v_i16m8_u16m8(vint16m8_t src
) {
307 return __riscv_vreinterpret_u16m8(src
);
310 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vreinterpret_v_u16mf4_i16mf4
311 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[SRC:%.*]]) #[[ATTR0]] {
312 // CHECK-RV64-NEXT: entry:
313 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[SRC]]
315 vint16mf4_t
test_vreinterpret_v_u16mf4_i16mf4(vuint16mf4_t src
) {
316 return __riscv_vreinterpret_i16mf4(src
);
319 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_u16mf2_i16mf2
320 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
321 // CHECK-RV64-NEXT: entry:
322 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[SRC]]
324 vint16mf2_t
test_vreinterpret_v_u16mf2_i16mf2(vuint16mf2_t src
) {
325 return __riscv_vreinterpret_i16mf2(src
);
328 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_u16m1_i16m1
329 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
330 // CHECK-RV64-NEXT: entry:
331 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[SRC]]
333 vint16m1_t
test_vreinterpret_v_u16m1_i16m1(vuint16m1_t src
) {
334 return __riscv_vreinterpret_i16m1(src
);
337 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_u16m2_i16m2
338 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
339 // CHECK-RV64-NEXT: entry:
340 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[SRC]]
342 vint16m2_t
test_vreinterpret_v_u16m2_i16m2(vuint16m2_t src
) {
343 return __riscv_vreinterpret_i16m2(src
);
346 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_u16m4_i16m4
347 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
348 // CHECK-RV64-NEXT: entry:
349 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[SRC]]
351 vint16m4_t
test_vreinterpret_v_u16m4_i16m4(vuint16m4_t src
) {
352 return __riscv_vreinterpret_i16m4(src
);
355 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_u16m8_i16m8
356 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
357 // CHECK-RV64-NEXT: entry:
358 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[SRC]]
360 vint16m8_t
test_vreinterpret_v_u16m8_i16m8(vuint16m8_t src
) {
361 return __riscv_vreinterpret_i16m8(src
);
364 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vreinterpret_v_f16mf4_i16mf4
365 // CHECK-RV64-SAME: (<vscale x 1 x half> [[SRC:%.*]]) #[[ATTR0]] {
366 // CHECK-RV64-NEXT: entry:
367 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x half> [[SRC]] to <vscale x 1 x i16>
368 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
370 vint16mf4_t
test_vreinterpret_v_f16mf4_i16mf4(vfloat16mf4_t src
) {
371 return __riscv_vreinterpret_i16mf4(src
);
374 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_f16mf2_i16mf2
375 // CHECK-RV64-SAME: (<vscale x 2 x half> [[SRC:%.*]]) #[[ATTR0]] {
376 // CHECK-RV64-NEXT: entry:
377 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x half> [[SRC]] to <vscale x 2 x i16>
378 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
380 vint16mf2_t
test_vreinterpret_v_f16mf2_i16mf2(vfloat16mf2_t src
) {
381 return __riscv_vreinterpret_i16mf2(src
);
384 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_f16m1_i16m1
385 // CHECK-RV64-SAME: (<vscale x 4 x half> [[SRC:%.*]]) #[[ATTR0]] {
386 // CHECK-RV64-NEXT: entry:
387 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x half> [[SRC]] to <vscale x 4 x i16>
388 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
390 vint16m1_t
test_vreinterpret_v_f16m1_i16m1(vfloat16m1_t src
) {
391 return __riscv_vreinterpret_i16m1(src
);
394 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_f16m2_i16m2
395 // CHECK-RV64-SAME: (<vscale x 8 x half> [[SRC:%.*]]) #[[ATTR0]] {
396 // CHECK-RV64-NEXT: entry:
397 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x half> [[SRC]] to <vscale x 8 x i16>
398 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
400 vint16m2_t
test_vreinterpret_v_f16m2_i16m2(vfloat16m2_t src
) {
401 return __riscv_vreinterpret_i16m2(src
);
404 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_f16m4_i16m4
405 // CHECK-RV64-SAME: (<vscale x 16 x half> [[SRC:%.*]]) #[[ATTR0]] {
406 // CHECK-RV64-NEXT: entry:
407 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x half> [[SRC]] to <vscale x 16 x i16>
408 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
410 vint16m4_t
test_vreinterpret_v_f16m4_i16m4(vfloat16m4_t src
) {
411 return __riscv_vreinterpret_i16m4(src
);
414 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_f16m8_i16m8
415 // CHECK-RV64-SAME: (<vscale x 32 x half> [[SRC:%.*]]) #[[ATTR0]] {
416 // CHECK-RV64-NEXT: entry:
417 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x half> [[SRC]] to <vscale x 32 x i16>
418 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
420 vint16m8_t
test_vreinterpret_v_f16m8_i16m8(vfloat16m8_t src
) {
421 return __riscv_vreinterpret_i16m8(src
);
424 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vreinterpret_v_f16mf4_u16mf4
425 // CHECK-RV64-SAME: (<vscale x 1 x half> [[SRC:%.*]]) #[[ATTR0]] {
426 // CHECK-RV64-NEXT: entry:
427 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x half> [[SRC]] to <vscale x 1 x i16>
428 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
430 vuint16mf4_t
test_vreinterpret_v_f16mf4_u16mf4(vfloat16mf4_t src
) {
431 return __riscv_vreinterpret_u16mf4(src
);
434 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_f16mf2_u16mf2
435 // CHECK-RV64-SAME: (<vscale x 2 x half> [[SRC:%.*]]) #[[ATTR0]] {
436 // CHECK-RV64-NEXT: entry:
437 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x half> [[SRC]] to <vscale x 2 x i16>
438 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
440 vuint16mf2_t
test_vreinterpret_v_f16mf2_u16mf2(vfloat16mf2_t src
) {
441 return __riscv_vreinterpret_u16mf2(src
);
444 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_f16m1_u16m1
445 // CHECK-RV64-SAME: (<vscale x 4 x half> [[SRC:%.*]]) #[[ATTR0]] {
446 // CHECK-RV64-NEXT: entry:
447 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x half> [[SRC]] to <vscale x 4 x i16>
448 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
450 vuint16m1_t
test_vreinterpret_v_f16m1_u16m1(vfloat16m1_t src
) {
451 return __riscv_vreinterpret_u16m1(src
);
454 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_f16m2_u16m2
455 // CHECK-RV64-SAME: (<vscale x 8 x half> [[SRC:%.*]]) #[[ATTR0]] {
456 // CHECK-RV64-NEXT: entry:
457 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x half> [[SRC]] to <vscale x 8 x i16>
458 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
460 vuint16m2_t
test_vreinterpret_v_f16m2_u16m2(vfloat16m2_t src
) {
461 return __riscv_vreinterpret_u16m2(src
);
464 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_f16m4_u16m4
465 // CHECK-RV64-SAME: (<vscale x 16 x half> [[SRC:%.*]]) #[[ATTR0]] {
466 // CHECK-RV64-NEXT: entry:
467 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x half> [[SRC]] to <vscale x 16 x i16>
468 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
470 vuint16m4_t
test_vreinterpret_v_f16m4_u16m4(vfloat16m4_t src
) {
471 return __riscv_vreinterpret_u16m4(src
);
474 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_f16m8_u16m8
475 // CHECK-RV64-SAME: (<vscale x 32 x half> [[SRC:%.*]]) #[[ATTR0]] {
476 // CHECK-RV64-NEXT: entry:
477 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x half> [[SRC]] to <vscale x 32 x i16>
478 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
480 vuint16m8_t
test_vreinterpret_v_f16m8_u16m8(vfloat16m8_t src
) {
481 return __riscv_vreinterpret_u16m8(src
);
484 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x float> @test_vreinterpret_v_i32mf2_f32mf2
485 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
486 // CHECK-RV64-NEXT: entry:
487 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC]] to <vscale x 1 x float>
488 // CHECK-RV64-NEXT: ret <vscale x 1 x float> [[TMP0]]
490 vfloat32mf2_t
test_vreinterpret_v_i32mf2_f32mf2(vint32mf2_t src
) {
491 return __riscv_vreinterpret_f32mf2(src
);
494 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x float> @test_vreinterpret_v_i32m1_f32m1
495 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
496 // CHECK-RV64-NEXT: entry:
497 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 2 x float>
498 // CHECK-RV64-NEXT: ret <vscale x 2 x float> [[TMP0]]
500 vfloat32m1_t
test_vreinterpret_v_i32m1_f32m1(vint32m1_t src
) {
501 return __riscv_vreinterpret_f32m1(src
);
504 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x float> @test_vreinterpret_v_i32m2_f32m2
505 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
506 // CHECK-RV64-NEXT: entry:
507 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 4 x float>
508 // CHECK-RV64-NEXT: ret <vscale x 4 x float> [[TMP0]]
510 vfloat32m2_t
test_vreinterpret_v_i32m2_f32m2(vint32m2_t src
) {
511 return __riscv_vreinterpret_f32m2(src
);
514 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x float> @test_vreinterpret_v_i32m4_f32m4
515 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
516 // CHECK-RV64-NEXT: entry:
517 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 8 x float>
518 // CHECK-RV64-NEXT: ret <vscale x 8 x float> [[TMP0]]
520 vfloat32m4_t
test_vreinterpret_v_i32m4_f32m4(vint32m4_t src
) {
521 return __riscv_vreinterpret_f32m4(src
);
524 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x float> @test_vreinterpret_v_i32m8_f32m8
525 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
526 // CHECK-RV64-NEXT: entry:
527 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 16 x float>
528 // CHECK-RV64-NEXT: ret <vscale x 16 x float> [[TMP0]]
530 vfloat32m8_t
test_vreinterpret_v_i32m8_f32m8(vint32m8_t src
) {
531 return __riscv_vreinterpret_f32m8(src
);
534 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x float> @test_vreinterpret_v_u32mf2_f32mf2
535 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
536 // CHECK-RV64-NEXT: entry:
537 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC]] to <vscale x 1 x float>
538 // CHECK-RV64-NEXT: ret <vscale x 1 x float> [[TMP0]]
540 vfloat32mf2_t
test_vreinterpret_v_u32mf2_f32mf2(vuint32mf2_t src
) {
541 return __riscv_vreinterpret_f32mf2(src
);
544 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x float> @test_vreinterpret_v_u32m1_f32m1
545 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
546 // CHECK-RV64-NEXT: entry:
547 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 2 x float>
548 // CHECK-RV64-NEXT: ret <vscale x 2 x float> [[TMP0]]
550 vfloat32m1_t
test_vreinterpret_v_u32m1_f32m1(vuint32m1_t src
) {
551 return __riscv_vreinterpret_f32m1(src
);
554 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x float> @test_vreinterpret_v_u32m2_f32m2
555 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
556 // CHECK-RV64-NEXT: entry:
557 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 4 x float>
558 // CHECK-RV64-NEXT: ret <vscale x 4 x float> [[TMP0]]
560 vfloat32m2_t
test_vreinterpret_v_u32m2_f32m2(vuint32m2_t src
) {
561 return __riscv_vreinterpret_f32m2(src
);
564 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x float> @test_vreinterpret_v_u32m4_f32m4
565 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
566 // CHECK-RV64-NEXT: entry:
567 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 8 x float>
568 // CHECK-RV64-NEXT: ret <vscale x 8 x float> [[TMP0]]
570 vfloat32m4_t
test_vreinterpret_v_u32m4_f32m4(vuint32m4_t src
) {
571 return __riscv_vreinterpret_f32m4(src
);
574 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x float> @test_vreinterpret_v_u32m8_f32m8
575 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
576 // CHECK-RV64-NEXT: entry:
577 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 16 x float>
578 // CHECK-RV64-NEXT: ret <vscale x 16 x float> [[TMP0]]
580 vfloat32m8_t
test_vreinterpret_v_u32m8_f32m8(vuint32m8_t src
) {
581 return __riscv_vreinterpret_f32m8(src
);
584 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_i32mf2_u32mf2
585 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
586 // CHECK-RV64-NEXT: entry:
587 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[SRC]]
589 vuint32mf2_t
test_vreinterpret_v_i32mf2_u32mf2(vint32mf2_t src
) {
590 return __riscv_vreinterpret_u32mf2(src
);
593 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_i32m1_u32m1
594 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
595 // CHECK-RV64-NEXT: entry:
596 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[SRC]]
598 vuint32m1_t
test_vreinterpret_v_i32m1_u32m1(vint32m1_t src
) {
599 return __riscv_vreinterpret_u32m1(src
);
602 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_i32m2_u32m2
603 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
604 // CHECK-RV64-NEXT: entry:
605 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[SRC]]
607 vuint32m2_t
test_vreinterpret_v_i32m2_u32m2(vint32m2_t src
) {
608 return __riscv_vreinterpret_u32m2(src
);
611 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_i32m4_u32m4
612 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
613 // CHECK-RV64-NEXT: entry:
614 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[SRC]]
616 vuint32m4_t
test_vreinterpret_v_i32m4_u32m4(vint32m4_t src
) {
617 return __riscv_vreinterpret_u32m4(src
);
620 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_i32m8_u32m8
621 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
622 // CHECK-RV64-NEXT: entry:
623 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[SRC]]
625 vuint32m8_t
test_vreinterpret_v_i32m8_u32m8(vint32m8_t src
) {
626 return __riscv_vreinterpret_u32m8(src
);
629 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_u32mf2_i32mf2
630 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
631 // CHECK-RV64-NEXT: entry:
632 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[SRC]]
634 vint32mf2_t
test_vreinterpret_v_u32mf2_i32mf2(vuint32mf2_t src
) {
635 return __riscv_vreinterpret_i32mf2(src
);
638 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_u32m1_i32m1
639 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
640 // CHECK-RV64-NEXT: entry:
641 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[SRC]]
643 vint32m1_t
test_vreinterpret_v_u32m1_i32m1(vuint32m1_t src
) {
644 return __riscv_vreinterpret_i32m1(src
);
647 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_u32m2_i32m2
648 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
649 // CHECK-RV64-NEXT: entry:
650 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[SRC]]
652 vint32m2_t
test_vreinterpret_v_u32m2_i32m2(vuint32m2_t src
) {
653 return __riscv_vreinterpret_i32m2(src
);
656 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_u32m4_i32m4
657 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
658 // CHECK-RV64-NEXT: entry:
659 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[SRC]]
661 vint32m4_t
test_vreinterpret_v_u32m4_i32m4(vuint32m4_t src
) {
662 return __riscv_vreinterpret_i32m4(src
);
665 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_u32m8_i32m8
666 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
667 // CHECK-RV64-NEXT: entry:
668 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[SRC]]
670 vint32m8_t
test_vreinterpret_v_u32m8_i32m8(vuint32m8_t src
) {
671 return __riscv_vreinterpret_i32m8(src
);
674 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_f32mf2_i32mf2
675 // CHECK-RV64-SAME: (<vscale x 1 x float> [[SRC:%.*]]) #[[ATTR0]] {
676 // CHECK-RV64-NEXT: entry:
677 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC]] to <vscale x 1 x i32>
678 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
680 vint32mf2_t
test_vreinterpret_v_f32mf2_i32mf2(vfloat32mf2_t src
) {
681 return __riscv_vreinterpret_i32mf2(src
);
684 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_f32m1_i32m1
685 // CHECK-RV64-SAME: (<vscale x 2 x float> [[SRC:%.*]]) #[[ATTR0]] {
686 // CHECK-RV64-NEXT: entry:
687 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC]] to <vscale x 2 x i32>
688 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
690 vint32m1_t
test_vreinterpret_v_f32m1_i32m1(vfloat32m1_t src
) {
691 return __riscv_vreinterpret_i32m1(src
);
694 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_f32m2_i32m2
695 // CHECK-RV64-SAME: (<vscale x 4 x float> [[SRC:%.*]]) #[[ATTR0]] {
696 // CHECK-RV64-NEXT: entry:
697 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC]] to <vscale x 4 x i32>
698 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
700 vint32m2_t
test_vreinterpret_v_f32m2_i32m2(vfloat32m2_t src
) {
701 return __riscv_vreinterpret_i32m2(src
);
704 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_f32m4_i32m4
705 // CHECK-RV64-SAME: (<vscale x 8 x float> [[SRC:%.*]]) #[[ATTR0]] {
706 // CHECK-RV64-NEXT: entry:
707 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC]] to <vscale x 8 x i32>
708 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
710 vint32m4_t
test_vreinterpret_v_f32m4_i32m4(vfloat32m4_t src
) {
711 return __riscv_vreinterpret_i32m4(src
);
714 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_f32m8_i32m8
715 // CHECK-RV64-SAME: (<vscale x 16 x float> [[SRC:%.*]]) #[[ATTR0]] {
716 // CHECK-RV64-NEXT: entry:
717 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC]] to <vscale x 16 x i32>
718 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
720 vint32m8_t
test_vreinterpret_v_f32m8_i32m8(vfloat32m8_t src
) {
721 return __riscv_vreinterpret_i32m8(src
);
724 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_f32mf2_u32mf2
725 // CHECK-RV64-SAME: (<vscale x 1 x float> [[SRC:%.*]]) #[[ATTR0]] {
726 // CHECK-RV64-NEXT: entry:
727 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC]] to <vscale x 1 x i32>
728 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
730 vuint32mf2_t
test_vreinterpret_v_f32mf2_u32mf2(vfloat32mf2_t src
) {
731 return __riscv_vreinterpret_u32mf2(src
);
734 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_f32m1_u32m1
735 // CHECK-RV64-SAME: (<vscale x 2 x float> [[SRC:%.*]]) #[[ATTR0]] {
736 // CHECK-RV64-NEXT: entry:
737 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC]] to <vscale x 2 x i32>
738 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
740 vuint32m1_t
test_vreinterpret_v_f32m1_u32m1(vfloat32m1_t src
) {
741 return __riscv_vreinterpret_u32m1(src
);
744 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_f32m2_u32m2
745 // CHECK-RV64-SAME: (<vscale x 4 x float> [[SRC:%.*]]) #[[ATTR0]] {
746 // CHECK-RV64-NEXT: entry:
747 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC]] to <vscale x 4 x i32>
748 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
750 vuint32m2_t
test_vreinterpret_v_f32m2_u32m2(vfloat32m2_t src
) {
751 return __riscv_vreinterpret_u32m2(src
);
754 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_f32m4_u32m4
755 // CHECK-RV64-SAME: (<vscale x 8 x float> [[SRC:%.*]]) #[[ATTR0]] {
756 // CHECK-RV64-NEXT: entry:
757 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC]] to <vscale x 8 x i32>
758 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
760 vuint32m4_t
test_vreinterpret_v_f32m4_u32m4(vfloat32m4_t src
) {
761 return __riscv_vreinterpret_u32m4(src
);
764 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_f32m8_u32m8
765 // CHECK-RV64-SAME: (<vscale x 16 x float> [[SRC:%.*]]) #[[ATTR0]] {
766 // CHECK-RV64-NEXT: entry:
767 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC]] to <vscale x 16 x i32>
768 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
770 vuint32m8_t
test_vreinterpret_v_f32m8_u32m8(vfloat32m8_t src
) {
771 return __riscv_vreinterpret_u32m8(src
);
774 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vreinterpret_v_i64m1_f64m1
775 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
776 // CHECK-RV64-NEXT: entry:
777 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 1 x double>
778 // CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP0]]
780 vfloat64m1_t
test_vreinterpret_v_i64m1_f64m1(vint64m1_t src
) {
781 return __riscv_vreinterpret_f64m1(src
);
784 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vreinterpret_v_i64m2_f64m2
785 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
786 // CHECK-RV64-NEXT: entry:
787 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 2 x double>
788 // CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP0]]
790 vfloat64m2_t
test_vreinterpret_v_i64m2_f64m2(vint64m2_t src
) {
791 return __riscv_vreinterpret_f64m2(src
);
794 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vreinterpret_v_i64m4_f64m4
795 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
796 // CHECK-RV64-NEXT: entry:
797 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 4 x double>
798 // CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP0]]
800 vfloat64m4_t
test_vreinterpret_v_i64m4_f64m4(vint64m4_t src
) {
801 return __riscv_vreinterpret_f64m4(src
);
804 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vreinterpret_v_i64m8_f64m8
805 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
806 // CHECK-RV64-NEXT: entry:
807 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 8 x double>
808 // CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP0]]
810 vfloat64m8_t
test_vreinterpret_v_i64m8_f64m8(vint64m8_t src
) {
811 return __riscv_vreinterpret_f64m8(src
);
814 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vreinterpret_v_u64m1_f64m1
815 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
816 // CHECK-RV64-NEXT: entry:
817 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 1 x double>
818 // CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP0]]
820 vfloat64m1_t
test_vreinterpret_v_u64m1_f64m1(vuint64m1_t src
) {
821 return __riscv_vreinterpret_f64m1(src
);
824 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vreinterpret_v_u64m2_f64m2
825 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
826 // CHECK-RV64-NEXT: entry:
827 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 2 x double>
828 // CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP0]]
830 vfloat64m2_t
test_vreinterpret_v_u64m2_f64m2(vuint64m2_t src
) {
831 return __riscv_vreinterpret_f64m2(src
);
834 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vreinterpret_v_u64m4_f64m4
835 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
836 // CHECK-RV64-NEXT: entry:
837 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 4 x double>
838 // CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP0]]
840 vfloat64m4_t
test_vreinterpret_v_u64m4_f64m4(vuint64m4_t src
) {
841 return __riscv_vreinterpret_f64m4(src
);
844 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vreinterpret_v_u64m8_f64m8
845 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
846 // CHECK-RV64-NEXT: entry:
847 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 8 x double>
848 // CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP0]]
850 vfloat64m8_t
test_vreinterpret_v_u64m8_f64m8(vuint64m8_t src
) {
851 return __riscv_vreinterpret_f64m8(src
);
854 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_i64m1_u64m1
855 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
856 // CHECK-RV64-NEXT: entry:
857 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[SRC]]
859 vuint64m1_t
test_vreinterpret_v_i64m1_u64m1(vint64m1_t src
) {
860 return __riscv_vreinterpret_u64m1(src
);
863 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_i64m2_u64m2
864 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
865 // CHECK-RV64-NEXT: entry:
866 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[SRC]]
868 vuint64m2_t
test_vreinterpret_v_i64m2_u64m2(vint64m2_t src
) {
869 return __riscv_vreinterpret_u64m2(src
);
872 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_i64m4_u64m4
873 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
874 // CHECK-RV64-NEXT: entry:
875 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[SRC]]
877 vuint64m4_t
test_vreinterpret_v_i64m4_u64m4(vint64m4_t src
) {
878 return __riscv_vreinterpret_u64m4(src
);
881 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_i64m8_u64m8
882 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
883 // CHECK-RV64-NEXT: entry:
884 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[SRC]]
886 vuint64m8_t
test_vreinterpret_v_i64m8_u64m8(vint64m8_t src
) {
887 return __riscv_vreinterpret_u64m8(src
);
890 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_u64m1_i64m1
891 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
892 // CHECK-RV64-NEXT: entry:
893 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[SRC]]
895 vint64m1_t
test_vreinterpret_v_u64m1_i64m1(vuint64m1_t src
) {
896 return __riscv_vreinterpret_i64m1(src
);
899 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_u64m2_i64m2
900 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
901 // CHECK-RV64-NEXT: entry:
902 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[SRC]]
904 vint64m2_t
test_vreinterpret_v_u64m2_i64m2(vuint64m2_t src
) {
905 return __riscv_vreinterpret_i64m2(src
);
908 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_u64m4_i64m4
909 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
910 // CHECK-RV64-NEXT: entry:
911 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[SRC]]
913 vint64m4_t
test_vreinterpret_v_u64m4_i64m4(vuint64m4_t src
) {
914 return __riscv_vreinterpret_i64m4(src
);
917 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_u64m8_i64m8
918 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
919 // CHECK-RV64-NEXT: entry:
920 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[SRC]]
922 vint64m8_t
test_vreinterpret_v_u64m8_i64m8(vuint64m8_t src
) {
923 return __riscv_vreinterpret_i64m8(src
);
926 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_f64m1_i64m1
927 // CHECK-RV64-SAME: (<vscale x 1 x double> [[SRC:%.*]]) #[[ATTR0]] {
928 // CHECK-RV64-NEXT: entry:
929 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC]] to <vscale x 1 x i64>
930 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
932 vint64m1_t
test_vreinterpret_v_f64m1_i64m1(vfloat64m1_t src
) {
933 return __riscv_vreinterpret_i64m1(src
);
936 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_f64m2_i64m2
937 // CHECK-RV64-SAME: (<vscale x 2 x double> [[SRC:%.*]]) #[[ATTR0]] {
938 // CHECK-RV64-NEXT: entry:
939 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC]] to <vscale x 2 x i64>
940 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
942 vint64m2_t
test_vreinterpret_v_f64m2_i64m2(vfloat64m2_t src
) {
943 return __riscv_vreinterpret_i64m2(src
);
946 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_f64m4_i64m4
947 // CHECK-RV64-SAME: (<vscale x 4 x double> [[SRC:%.*]]) #[[ATTR0]] {
948 // CHECK-RV64-NEXT: entry:
949 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC]] to <vscale x 4 x i64>
950 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
952 vint64m4_t
test_vreinterpret_v_f64m4_i64m4(vfloat64m4_t src
) {
953 return __riscv_vreinterpret_i64m4(src
);
956 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_f64m8_i64m8
957 // CHECK-RV64-SAME: (<vscale x 8 x double> [[SRC:%.*]]) #[[ATTR0]] {
958 // CHECK-RV64-NEXT: entry:
959 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC]] to <vscale x 8 x i64>
960 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
962 vint64m8_t
test_vreinterpret_v_f64m8_i64m8(vfloat64m8_t src
) {
963 return __riscv_vreinterpret_i64m8(src
);
966 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_f64m1_u64m1
967 // CHECK-RV64-SAME: (<vscale x 1 x double> [[SRC:%.*]]) #[[ATTR0]] {
968 // CHECK-RV64-NEXT: entry:
969 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC]] to <vscale x 1 x i64>
970 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
972 vuint64m1_t
test_vreinterpret_v_f64m1_u64m1(vfloat64m1_t src
) {
973 return __riscv_vreinterpret_u64m1(src
);
976 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_f64m2_u64m2
977 // CHECK-RV64-SAME: (<vscale x 2 x double> [[SRC:%.*]]) #[[ATTR0]] {
978 // CHECK-RV64-NEXT: entry:
979 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC]] to <vscale x 2 x i64>
980 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
982 vuint64m2_t
test_vreinterpret_v_f64m2_u64m2(vfloat64m2_t src
) {
983 return __riscv_vreinterpret_u64m2(src
);
986 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_f64m4_u64m4
987 // CHECK-RV64-SAME: (<vscale x 4 x double> [[SRC:%.*]]) #[[ATTR0]] {
988 // CHECK-RV64-NEXT: entry:
989 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC]] to <vscale x 4 x i64>
990 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
992 vuint64m4_t
test_vreinterpret_v_f64m4_u64m4(vfloat64m4_t src
) {
993 return __riscv_vreinterpret_u64m4(src
);
996 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_f64m8_u64m8
997 // CHECK-RV64-SAME: (<vscale x 8 x double> [[SRC:%.*]]) #[[ATTR0]] {
998 // CHECK-RV64-NEXT: entry:
999 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC]] to <vscale x 8 x i64>
1000 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
1002 vuint64m8_t
test_vreinterpret_v_f64m8_u64m8(vfloat64m8_t src
) {
1003 return __riscv_vreinterpret_u64m8(src
);
1006 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vreinterpret_v_i8mf4_i16mf4
1007 // CHECK-RV64-SAME: (<vscale x 2 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1008 // CHECK-RV64-NEXT: entry:
1009 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC]] to <vscale x 1 x i16>
1010 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
1012 vint16mf4_t
test_vreinterpret_v_i8mf4_i16mf4(vint8mf4_t src
) {
1013 return __riscv_vreinterpret_i16mf4(src
);
1016 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_i8mf2_i16mf2
1017 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1018 // CHECK-RV64-NEXT: entry:
1019 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC]] to <vscale x 2 x i16>
1020 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
1022 vint16mf2_t
test_vreinterpret_v_i8mf2_i16mf2(vint8mf2_t src
) {
1023 return __riscv_vreinterpret_i16mf2(src
);
1026 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_i8m1_i16m1
1027 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1028 // CHECK-RV64-NEXT: entry:
1029 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 4 x i16>
1030 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
1032 vint16m1_t
test_vreinterpret_v_i8m1_i16m1(vint8m1_t src
) {
1033 return __riscv_vreinterpret_i16m1(src
);
1036 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_i8m2_i16m2
1037 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1038 // CHECK-RV64-NEXT: entry:
1039 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC]] to <vscale x 8 x i16>
1040 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
1042 vint16m2_t
test_vreinterpret_v_i8m2_i16m2(vint8m2_t src
) {
1043 return __riscv_vreinterpret_i16m2(src
);
1046 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_i8m4_i16m4
1047 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1048 // CHECK-RV64-NEXT: entry:
1049 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC]] to <vscale x 16 x i16>
1050 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
1052 vint16m4_t
test_vreinterpret_v_i8m4_i16m4(vint8m4_t src
) {
1053 return __riscv_vreinterpret_i16m4(src
);
1056 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_i8m8_i16m8
1057 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1058 // CHECK-RV64-NEXT: entry:
1059 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC]] to <vscale x 32 x i16>
1060 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
1062 vint16m8_t
test_vreinterpret_v_i8m8_i16m8(vint8m8_t src
) {
1063 return __riscv_vreinterpret_i16m8(src
);
1066 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vreinterpret_v_u8mf4_u16mf4
1067 // CHECK-RV64-SAME: (<vscale x 2 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1068 // CHECK-RV64-NEXT: entry:
1069 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC]] to <vscale x 1 x i16>
1070 // CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
1072 vuint16mf4_t
test_vreinterpret_v_u8mf4_u16mf4(vuint8mf4_t src
) {
1073 return __riscv_vreinterpret_u16mf4(src
);
1076 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_u8mf2_u16mf2
1077 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1078 // CHECK-RV64-NEXT: entry:
1079 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC]] to <vscale x 2 x i16>
1080 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
1082 vuint16mf2_t
test_vreinterpret_v_u8mf2_u16mf2(vuint8mf2_t src
) {
1083 return __riscv_vreinterpret_u16mf2(src
);
1086 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_u8m1_u16m1
1087 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1088 // CHECK-RV64-NEXT: entry:
1089 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 4 x i16>
1090 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
1092 vuint16m1_t
test_vreinterpret_v_u8m1_u16m1(vuint8m1_t src
) {
1093 return __riscv_vreinterpret_u16m1(src
);
1096 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_u8m2_u16m2
1097 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1098 // CHECK-RV64-NEXT: entry:
1099 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC]] to <vscale x 8 x i16>
1100 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
1102 vuint16m2_t
test_vreinterpret_v_u8m2_u16m2(vuint8m2_t src
) {
1103 return __riscv_vreinterpret_u16m2(src
);
1106 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_u8m4_u16m4
1107 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1108 // CHECK-RV64-NEXT: entry:
1109 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC]] to <vscale x 16 x i16>
1110 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
1112 vuint16m4_t
test_vreinterpret_v_u8m4_u16m4(vuint8m4_t src
) {
1113 return __riscv_vreinterpret_u16m4(src
);
1116 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_u8m8_u16m8
1117 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1118 // CHECK-RV64-NEXT: entry:
1119 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC]] to <vscale x 32 x i16>
1120 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
1122 vuint16m8_t
test_vreinterpret_v_u8m8_u16m8(vuint8m8_t src
) {
1123 return __riscv_vreinterpret_u16m8(src
);
1126 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_i8mf2_i32mf2
1127 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1128 // CHECK-RV64-NEXT: entry:
1129 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC]] to <vscale x 1 x i32>
1130 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
1132 vint32mf2_t
test_vreinterpret_v_i8mf2_i32mf2(vint8mf2_t src
) {
1133 return __riscv_vreinterpret_i32mf2(src
);
1136 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_i8m1_i32m1
1137 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1138 // CHECK-RV64-NEXT: entry:
1139 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 2 x i32>
1140 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
1142 vint32m1_t
test_vreinterpret_v_i8m1_i32m1(vint8m1_t src
) {
1143 return __riscv_vreinterpret_i32m1(src
);
1146 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_i8m2_i32m2
1147 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1148 // CHECK-RV64-NEXT: entry:
1149 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC]] to <vscale x 4 x i32>
1150 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
1152 vint32m2_t
test_vreinterpret_v_i8m2_i32m2(vint8m2_t src
) {
1153 return __riscv_vreinterpret_i32m2(src
);
1156 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_i8m4_i32m4
1157 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1158 // CHECK-RV64-NEXT: entry:
1159 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC]] to <vscale x 8 x i32>
1160 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
1162 vint32m4_t
test_vreinterpret_v_i8m4_i32m4(vint8m4_t src
) {
1163 return __riscv_vreinterpret_i32m4(src
);
1166 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_i8m8_i32m8
1167 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1168 // CHECK-RV64-NEXT: entry:
1169 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC]] to <vscale x 16 x i32>
1170 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
1172 vint32m8_t
test_vreinterpret_v_i8m8_i32m8(vint8m8_t src
) {
1173 return __riscv_vreinterpret_i32m8(src
);
1176 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_u8mf2_u32mf2
1177 // CHECK-RV64-SAME: (<vscale x 4 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1178 // CHECK-RV64-NEXT: entry:
1179 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC]] to <vscale x 1 x i32>
1180 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
1182 vuint32mf2_t
test_vreinterpret_v_u8mf2_u32mf2(vuint8mf2_t src
) {
1183 return __riscv_vreinterpret_u32mf2(src
);
1186 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_u8m1_u32m1
1187 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1188 // CHECK-RV64-NEXT: entry:
1189 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 2 x i32>
1190 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
1192 vuint32m1_t
test_vreinterpret_v_u8m1_u32m1(vuint8m1_t src
) {
1193 return __riscv_vreinterpret_u32m1(src
);
1196 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_u8m2_u32m2
1197 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1198 // CHECK-RV64-NEXT: entry:
1199 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC]] to <vscale x 4 x i32>
1200 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
1202 vuint32m2_t
test_vreinterpret_v_u8m2_u32m2(vuint8m2_t src
) {
1203 return __riscv_vreinterpret_u32m2(src
);
1206 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_u8m4_u32m4
1207 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1208 // CHECK-RV64-NEXT: entry:
1209 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC]] to <vscale x 8 x i32>
1210 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
1212 vuint32m4_t
test_vreinterpret_v_u8m4_u32m4(vuint8m4_t src
) {
1213 return __riscv_vreinterpret_u32m4(src
);
1216 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_u8m8_u32m8
1217 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1218 // CHECK-RV64-NEXT: entry:
1219 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC]] to <vscale x 16 x i32>
1220 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
1222 vuint32m8_t
test_vreinterpret_v_u8m8_u32m8(vuint8m8_t src
) {
1223 return __riscv_vreinterpret_u32m8(src
);
1226 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_i8m1_i64m1
1227 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1228 // CHECK-RV64-NEXT: entry:
1229 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 1 x i64>
1230 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
1232 vint64m1_t
test_vreinterpret_v_i8m1_i64m1(vint8m1_t src
) {
1233 return __riscv_vreinterpret_i64m1(src
);
1236 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_i8m2_i64m2
1237 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1238 // CHECK-RV64-NEXT: entry:
1239 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC]] to <vscale x 2 x i64>
1240 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
1242 vint64m2_t
test_vreinterpret_v_i8m2_i64m2(vint8m2_t src
) {
1243 return __riscv_vreinterpret_i64m2(src
);
1246 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_i8m4_i64m4
1247 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1248 // CHECK-RV64-NEXT: entry:
1249 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC]] to <vscale x 4 x i64>
1250 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
1252 vint64m4_t
test_vreinterpret_v_i8m4_i64m4(vint8m4_t src
) {
1253 return __riscv_vreinterpret_i64m4(src
);
1256 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_i8m8_i64m8
1257 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1258 // CHECK-RV64-NEXT: entry:
1259 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC]] to <vscale x 8 x i64>
1260 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
1262 vint64m8_t
test_vreinterpret_v_i8m8_i64m8(vint8m8_t src
) {
1263 return __riscv_vreinterpret_i64m8(src
);
1266 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_u8m1_u64m1
1267 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1268 // CHECK-RV64-NEXT: entry:
1269 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 1 x i64>
1270 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
1272 vuint64m1_t
test_vreinterpret_v_u8m1_u64m1(vuint8m1_t src
) {
1273 return __riscv_vreinterpret_u64m1(src
);
1276 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_u8m2_u64m2
1277 // CHECK-RV64-SAME: (<vscale x 16 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1278 // CHECK-RV64-NEXT: entry:
1279 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC]] to <vscale x 2 x i64>
1280 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
1282 vuint64m2_t
test_vreinterpret_v_u8m2_u64m2(vuint8m2_t src
) {
1283 return __riscv_vreinterpret_u64m2(src
);
1286 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_u8m4_u64m4
1287 // CHECK-RV64-SAME: (<vscale x 32 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1288 // CHECK-RV64-NEXT: entry:
1289 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC]] to <vscale x 4 x i64>
1290 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
1292 vuint64m4_t
test_vreinterpret_v_u8m4_u64m4(vuint8m4_t src
) {
1293 return __riscv_vreinterpret_u64m4(src
);
1296 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_u8m8_u64m8
1297 // CHECK-RV64-SAME: (<vscale x 64 x i8> [[SRC:%.*]]) #[[ATTR0]] {
1298 // CHECK-RV64-NEXT: entry:
1299 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC]] to <vscale x 8 x i64>
1300 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
1302 vuint64m8_t
test_vreinterpret_v_u8m8_u64m8(vuint8m8_t src
) {
1303 return __riscv_vreinterpret_u64m8(src
);
1306 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vreinterpret_v_i16mf4_i8mf4
1307 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1308 // CHECK-RV64-NEXT: entry:
1309 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC]] to <vscale x 2 x i8>
1310 // CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
1312 vint8mf4_t
test_vreinterpret_v_i16mf4_i8mf4(vint16mf4_t src
) {
1313 return __riscv_vreinterpret_i8mf4(src
);
1316 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vreinterpret_v_i16mf2_i8mf2
1317 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1318 // CHECK-RV64-NEXT: entry:
1319 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC]] to <vscale x 4 x i8>
1320 // CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
1322 vint8mf2_t
test_vreinterpret_v_i16mf2_i8mf2(vint16mf2_t src
) {
1323 return __riscv_vreinterpret_i8mf2(src
);
1326 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_i16m1_i8m1
1327 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1328 // CHECK-RV64-NEXT: entry:
1329 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 8 x i8>
1330 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
1332 vint8m1_t
test_vreinterpret_v_i16m1_i8m1(vint16m1_t src
) {
1333 return __riscv_vreinterpret_i8m1(src
);
1336 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_i16m2_i8m2
1337 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1338 // CHECK-RV64-NEXT: entry:
1339 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 16 x i8>
1340 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
1342 vint8m2_t
test_vreinterpret_v_i16m2_i8m2(vint16m2_t src
) {
1343 return __riscv_vreinterpret_i8m2(src
);
1346 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_i16m4_i8m4
1347 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1348 // CHECK-RV64-NEXT: entry:
1349 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 32 x i8>
1350 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
1352 vint8m4_t
test_vreinterpret_v_i16m4_i8m4(vint16m4_t src
) {
1353 return __riscv_vreinterpret_i8m4(src
);
1356 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_i16m8_i8m8
1357 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1358 // CHECK-RV64-NEXT: entry:
1359 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 64 x i8>
1360 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
1362 vint8m8_t
test_vreinterpret_v_i16m8_i8m8(vint16m8_t src
) {
1363 return __riscv_vreinterpret_i8m8(src
);
1366 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vreinterpret_v_u16mf4_u8mf4
1367 // CHECK-RV64-SAME: (<vscale x 1 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1368 // CHECK-RV64-NEXT: entry:
1369 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC]] to <vscale x 2 x i8>
1370 // CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
1372 vuint8mf4_t
test_vreinterpret_v_u16mf4_u8mf4(vuint16mf4_t src
) {
1373 return __riscv_vreinterpret_u8mf4(src
);
1376 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vreinterpret_v_u16mf2_u8mf2
1377 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1378 // CHECK-RV64-NEXT: entry:
1379 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC]] to <vscale x 4 x i8>
1380 // CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
1382 vuint8mf2_t
test_vreinterpret_v_u16mf2_u8mf2(vuint16mf2_t src
) {
1383 return __riscv_vreinterpret_u8mf2(src
);
1386 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_u16m1_u8m1
1387 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1388 // CHECK-RV64-NEXT: entry:
1389 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 8 x i8>
1390 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
1392 vuint8m1_t
test_vreinterpret_v_u16m1_u8m1(vuint16m1_t src
) {
1393 return __riscv_vreinterpret_u8m1(src
);
1396 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_u16m2_u8m2
1397 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1398 // CHECK-RV64-NEXT: entry:
1399 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 16 x i8>
1400 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
1402 vuint8m2_t
test_vreinterpret_v_u16m2_u8m2(vuint16m2_t src
) {
1403 return __riscv_vreinterpret_u8m2(src
);
1406 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_u16m4_u8m4
1407 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1408 // CHECK-RV64-NEXT: entry:
1409 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 32 x i8>
1410 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
1412 vuint8m4_t
test_vreinterpret_v_u16m4_u8m4(vuint16m4_t src
) {
1413 return __riscv_vreinterpret_u8m4(src
);
1416 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_u16m8_u8m8
1417 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1418 // CHECK-RV64-NEXT: entry:
1419 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 64 x i8>
1420 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
1422 vuint8m8_t
test_vreinterpret_v_u16m8_u8m8(vuint16m8_t src
) {
1423 return __riscv_vreinterpret_u8m8(src
);
1426 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_i16mf2_i32mf2
1427 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1428 // CHECK-RV64-NEXT: entry:
1429 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC]] to <vscale x 1 x i32>
1430 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
1432 vint32mf2_t
test_vreinterpret_v_i16mf2_i32mf2(vint16mf2_t src
) {
1433 return __riscv_vreinterpret_i32mf2(src
);
1436 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_i16m1_i32m1
1437 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1438 // CHECK-RV64-NEXT: entry:
1439 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 2 x i32>
1440 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
1442 vint32m1_t
test_vreinterpret_v_i16m1_i32m1(vint16m1_t src
) {
1443 return __riscv_vreinterpret_i32m1(src
);
1446 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_i16m2_i32m2
1447 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1448 // CHECK-RV64-NEXT: entry:
1449 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 4 x i32>
1450 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
1452 vint32m2_t
test_vreinterpret_v_i16m2_i32m2(vint16m2_t src
) {
1453 return __riscv_vreinterpret_i32m2(src
);
1456 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_i16m4_i32m4
1457 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1458 // CHECK-RV64-NEXT: entry:
1459 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 8 x i32>
1460 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
1462 vint32m4_t
test_vreinterpret_v_i16m4_i32m4(vint16m4_t src
) {
1463 return __riscv_vreinterpret_i32m4(src
);
1466 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_i16m8_i32m8
1467 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1468 // CHECK-RV64-NEXT: entry:
1469 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 16 x i32>
1470 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
1472 vint32m8_t
test_vreinterpret_v_i16m8_i32m8(vint16m8_t src
) {
1473 return __riscv_vreinterpret_i32m8(src
);
1476 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vreinterpret_v_u16mf2_u32mf2
1477 // CHECK-RV64-SAME: (<vscale x 2 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1478 // CHECK-RV64-NEXT: entry:
1479 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC]] to <vscale x 1 x i32>
1480 // CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
1482 vuint32mf2_t
test_vreinterpret_v_u16mf2_u32mf2(vuint16mf2_t src
) {
1483 return __riscv_vreinterpret_u32mf2(src
);
1486 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_u16m1_u32m1
1487 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1488 // CHECK-RV64-NEXT: entry:
1489 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 2 x i32>
1490 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
1492 vuint32m1_t
test_vreinterpret_v_u16m1_u32m1(vuint16m1_t src
) {
1493 return __riscv_vreinterpret_u32m1(src
);
1496 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_u16m2_u32m2
1497 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1498 // CHECK-RV64-NEXT: entry:
1499 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 4 x i32>
1500 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
1502 vuint32m2_t
test_vreinterpret_v_u16m2_u32m2(vuint16m2_t src
) {
1503 return __riscv_vreinterpret_u32m2(src
);
1506 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_u16m4_u32m4
1507 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1508 // CHECK-RV64-NEXT: entry:
1509 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 8 x i32>
1510 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
1512 vuint32m4_t
test_vreinterpret_v_u16m4_u32m4(vuint16m4_t src
) {
1513 return __riscv_vreinterpret_u32m4(src
);
1516 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_u16m8_u32m8
1517 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1518 // CHECK-RV64-NEXT: entry:
1519 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 16 x i32>
1520 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
1522 vuint32m8_t
test_vreinterpret_v_u16m8_u32m8(vuint16m8_t src
) {
1523 return __riscv_vreinterpret_u32m8(src
);
1526 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_i16m1_i64m1
1527 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1528 // CHECK-RV64-NEXT: entry:
1529 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 1 x i64>
1530 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
1532 vint64m1_t
test_vreinterpret_v_i16m1_i64m1(vint16m1_t src
) {
1533 return __riscv_vreinterpret_i64m1(src
);
1536 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_i16m2_i64m2
1537 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1538 // CHECK-RV64-NEXT: entry:
1539 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 2 x i64>
1540 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
1542 vint64m2_t
test_vreinterpret_v_i16m2_i64m2(vint16m2_t src
) {
1543 return __riscv_vreinterpret_i64m2(src
);
1546 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_i16m4_i64m4
1547 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1548 // CHECK-RV64-NEXT: entry:
1549 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 4 x i64>
1550 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
1552 vint64m4_t
test_vreinterpret_v_i16m4_i64m4(vint16m4_t src
) {
1553 return __riscv_vreinterpret_i64m4(src
);
1556 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_i16m8_i64m8
1557 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1558 // CHECK-RV64-NEXT: entry:
1559 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 8 x i64>
1560 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
1562 vint64m8_t
test_vreinterpret_v_i16m8_i64m8(vint16m8_t src
) {
1563 return __riscv_vreinterpret_i64m8(src
);
1566 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_u16m1_u64m1
1567 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1568 // CHECK-RV64-NEXT: entry:
1569 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 1 x i64>
1570 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
1572 vuint64m1_t
test_vreinterpret_v_u16m1_u64m1(vuint16m1_t src
) {
1573 return __riscv_vreinterpret_u64m1(src
);
1576 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_u16m2_u64m2
1577 // CHECK-RV64-SAME: (<vscale x 8 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1578 // CHECK-RV64-NEXT: entry:
1579 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC]] to <vscale x 2 x i64>
1580 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
1582 vuint64m2_t
test_vreinterpret_v_u16m2_u64m2(vuint16m2_t src
) {
1583 return __riscv_vreinterpret_u64m2(src
);
1586 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_u16m4_u64m4
1587 // CHECK-RV64-SAME: (<vscale x 16 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1588 // CHECK-RV64-NEXT: entry:
1589 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC]] to <vscale x 4 x i64>
1590 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
1592 vuint64m4_t
test_vreinterpret_v_u16m4_u64m4(vuint16m4_t src
) {
1593 return __riscv_vreinterpret_u64m4(src
);
1596 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_u16m8_u64m8
1597 // CHECK-RV64-SAME: (<vscale x 32 x i16> [[SRC:%.*]]) #[[ATTR0]] {
1598 // CHECK-RV64-NEXT: entry:
1599 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC]] to <vscale x 8 x i64>
1600 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
1602 vuint64m8_t
test_vreinterpret_v_u16m8_u64m8(vuint16m8_t src
) {
1603 return __riscv_vreinterpret_u64m8(src
);
1606 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vreinterpret_v_i32mf2_i8mf2
1607 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1608 // CHECK-RV64-NEXT: entry:
1609 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC]] to <vscale x 4 x i8>
1610 // CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
1612 vint8mf2_t
test_vreinterpret_v_i32mf2_i8mf2(vint32mf2_t src
) {
1613 return __riscv_vreinterpret_i8mf2(src
);
1616 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_i32m1_i8m1
1617 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1618 // CHECK-RV64-NEXT: entry:
1619 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 8 x i8>
1620 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
1622 vint8m1_t
test_vreinterpret_v_i32m1_i8m1(vint32m1_t src
) {
1623 return __riscv_vreinterpret_i8m1(src
);
1626 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_i32m2_i8m2
1627 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1628 // CHECK-RV64-NEXT: entry:
1629 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 16 x i8>
1630 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
1632 vint8m2_t
test_vreinterpret_v_i32m2_i8m2(vint32m2_t src
) {
1633 return __riscv_vreinterpret_i8m2(src
);
1636 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_i32m4_i8m4
1637 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1638 // CHECK-RV64-NEXT: entry:
1639 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 32 x i8>
1640 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
1642 vint8m4_t
test_vreinterpret_v_i32m4_i8m4(vint32m4_t src
) {
1643 return __riscv_vreinterpret_i8m4(src
);
1646 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_i32m8_i8m8
1647 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1648 // CHECK-RV64-NEXT: entry:
1649 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 64 x i8>
1650 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
1652 vint8m8_t
test_vreinterpret_v_i32m8_i8m8(vint32m8_t src
) {
1653 return __riscv_vreinterpret_i8m8(src
);
1656 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vreinterpret_v_u32mf2_u8mf2
1657 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1658 // CHECK-RV64-NEXT: entry:
1659 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC]] to <vscale x 4 x i8>
1660 // CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
1662 vuint8mf2_t
test_vreinterpret_v_u32mf2_u8mf2(vuint32mf2_t src
) {
1663 return __riscv_vreinterpret_u8mf2(src
);
1666 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_u32m1_u8m1
1667 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1668 // CHECK-RV64-NEXT: entry:
1669 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 8 x i8>
1670 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
1672 vuint8m1_t
test_vreinterpret_v_u32m1_u8m1(vuint32m1_t src
) {
1673 return __riscv_vreinterpret_u8m1(src
);
1676 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_u32m2_u8m2
1677 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1678 // CHECK-RV64-NEXT: entry:
1679 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 16 x i8>
1680 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
1682 vuint8m2_t
test_vreinterpret_v_u32m2_u8m2(vuint32m2_t src
) {
1683 return __riscv_vreinterpret_u8m2(src
);
1686 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_u32m4_u8m4
1687 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1688 // CHECK-RV64-NEXT: entry:
1689 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 32 x i8>
1690 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
1692 vuint8m4_t
test_vreinterpret_v_u32m4_u8m4(vuint32m4_t src
) {
1693 return __riscv_vreinterpret_u8m4(src
);
1696 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_u32m8_u8m8
1697 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1698 // CHECK-RV64-NEXT: entry:
1699 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 64 x i8>
1700 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
1702 vuint8m8_t
test_vreinterpret_v_u32m8_u8m8(vuint32m8_t src
) {
1703 return __riscv_vreinterpret_u8m8(src
);
1706 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_i32mf2_i16mf2
1707 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1708 // CHECK-RV64-NEXT: entry:
1709 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC]] to <vscale x 2 x i16>
1710 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
1712 vint16mf2_t
test_vreinterpret_v_i32mf2_i16mf2(vint32mf2_t src
) {
1713 return __riscv_vreinterpret_i16mf2(src
);
1716 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_i32m1_i16m1
1717 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1718 // CHECK-RV64-NEXT: entry:
1719 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 4 x i16>
1720 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
1722 vint16m1_t
test_vreinterpret_v_i32m1_i16m1(vint32m1_t src
) {
1723 return __riscv_vreinterpret_i16m1(src
);
1726 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_i32m2_i16m2
1727 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1728 // CHECK-RV64-NEXT: entry:
1729 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 8 x i16>
1730 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
1732 vint16m2_t
test_vreinterpret_v_i32m2_i16m2(vint32m2_t src
) {
1733 return __riscv_vreinterpret_i16m2(src
);
1736 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_i32m4_i16m4
1737 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1738 // CHECK-RV64-NEXT: entry:
1739 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 16 x i16>
1740 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
1742 vint16m4_t
test_vreinterpret_v_i32m4_i16m4(vint32m4_t src
) {
1743 return __riscv_vreinterpret_i16m4(src
);
1746 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_i32m8_i16m8
1747 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1748 // CHECK-RV64-NEXT: entry:
1749 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 32 x i16>
1750 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
1752 vint16m8_t
test_vreinterpret_v_i32m8_i16m8(vint32m8_t src
) {
1753 return __riscv_vreinterpret_i16m8(src
);
1756 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vreinterpret_v_u32mf2_u16mf2
1757 // CHECK-RV64-SAME: (<vscale x 1 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1758 // CHECK-RV64-NEXT: entry:
1759 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC]] to <vscale x 2 x i16>
1760 // CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
1762 vuint16mf2_t
test_vreinterpret_v_u32mf2_u16mf2(vuint32mf2_t src
) {
1763 return __riscv_vreinterpret_u16mf2(src
);
1766 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_u32m1_u16m1
1767 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1768 // CHECK-RV64-NEXT: entry:
1769 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 4 x i16>
1770 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
1772 vuint16m1_t
test_vreinterpret_v_u32m1_u16m1(vuint32m1_t src
) {
1773 return __riscv_vreinterpret_u16m1(src
);
1776 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_u32m2_u16m2
1777 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1778 // CHECK-RV64-NEXT: entry:
1779 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 8 x i16>
1780 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
1782 vuint16m2_t
test_vreinterpret_v_u32m2_u16m2(vuint32m2_t src
) {
1783 return __riscv_vreinterpret_u16m2(src
);
1786 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_u32m4_u16m4
1787 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1788 // CHECK-RV64-NEXT: entry:
1789 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 16 x i16>
1790 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
1792 vuint16m4_t
test_vreinterpret_v_u32m4_u16m4(vuint32m4_t src
) {
1793 return __riscv_vreinterpret_u16m4(src
);
1796 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_u32m8_u16m8
1797 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1798 // CHECK-RV64-NEXT: entry:
1799 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 32 x i16>
1800 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
1802 vuint16m8_t
test_vreinterpret_v_u32m8_u16m8(vuint32m8_t src
) {
1803 return __riscv_vreinterpret_u16m8(src
);
1806 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_i32m1_i64m1
1807 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1808 // CHECK-RV64-NEXT: entry:
1809 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 1 x i64>
1810 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
1812 vint64m1_t
test_vreinterpret_v_i32m1_i64m1(vint32m1_t src
) {
1813 return __riscv_vreinterpret_i64m1(src
);
1816 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_i32m2_i64m2
1817 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1818 // CHECK-RV64-NEXT: entry:
1819 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 2 x i64>
1820 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
1822 vint64m2_t
test_vreinterpret_v_i32m2_i64m2(vint32m2_t src
) {
1823 return __riscv_vreinterpret_i64m2(src
);
1826 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_i32m4_i64m4
1827 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1828 // CHECK-RV64-NEXT: entry:
1829 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 4 x i64>
1830 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
1832 vint64m4_t
test_vreinterpret_v_i32m4_i64m4(vint32m4_t src
) {
1833 return __riscv_vreinterpret_i64m4(src
);
1836 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_i32m8_i64m8
1837 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1838 // CHECK-RV64-NEXT: entry:
1839 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 8 x i64>
1840 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
1842 vint64m8_t
test_vreinterpret_v_i32m8_i64m8(vint32m8_t src
) {
1843 return __riscv_vreinterpret_i64m8(src
);
1846 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_u32m1_u64m1
1847 // CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1848 // CHECK-RV64-NEXT: entry:
1849 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 1 x i64>
1850 // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
1852 vuint64m1_t
test_vreinterpret_v_u32m1_u64m1(vuint32m1_t src
) {
1853 return __riscv_vreinterpret_u64m1(src
);
1856 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vreinterpret_v_u32m2_u64m2
1857 // CHECK-RV64-SAME: (<vscale x 4 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1858 // CHECK-RV64-NEXT: entry:
1859 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC]] to <vscale x 2 x i64>
1860 // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
1862 vuint64m2_t
test_vreinterpret_v_u32m2_u64m2(vuint32m2_t src
) {
1863 return __riscv_vreinterpret_u64m2(src
);
1866 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vreinterpret_v_u32m4_u64m4
1867 // CHECK-RV64-SAME: (<vscale x 8 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1868 // CHECK-RV64-NEXT: entry:
1869 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC]] to <vscale x 4 x i64>
1870 // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
1872 vuint64m4_t
test_vreinterpret_v_u32m4_u64m4(vuint32m4_t src
) {
1873 return __riscv_vreinterpret_u64m4(src
);
1876 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vreinterpret_v_u32m8_u64m8
1877 // CHECK-RV64-SAME: (<vscale x 16 x i32> [[SRC:%.*]]) #[[ATTR0]] {
1878 // CHECK-RV64-NEXT: entry:
1879 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC]] to <vscale x 8 x i64>
1880 // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
1882 vuint64m8_t
test_vreinterpret_v_u32m8_u64m8(vuint32m8_t src
) {
1883 return __riscv_vreinterpret_u64m8(src
);
1886 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_i64m1_i8m1
1887 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1888 // CHECK-RV64-NEXT: entry:
1889 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 8 x i8>
1890 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
1892 vint8m1_t
test_vreinterpret_v_i64m1_i8m1(vint64m1_t src
) {
1893 return __riscv_vreinterpret_i8m1(src
);
1896 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_i64m2_i8m2
1897 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1898 // CHECK-RV64-NEXT: entry:
1899 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 16 x i8>
1900 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
1902 vint8m2_t
test_vreinterpret_v_i64m2_i8m2(vint64m2_t src
) {
1903 return __riscv_vreinterpret_i8m2(src
);
1906 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_i64m4_i8m4
1907 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1908 // CHECK-RV64-NEXT: entry:
1909 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 32 x i8>
1910 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
1912 vint8m4_t
test_vreinterpret_v_i64m4_i8m4(vint64m4_t src
) {
1913 return __riscv_vreinterpret_i8m4(src
);
1916 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_i64m8_i8m8
1917 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1918 // CHECK-RV64-NEXT: entry:
1919 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 64 x i8>
1920 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
1922 vint8m8_t
test_vreinterpret_v_i64m8_i8m8(vint64m8_t src
) {
1923 return __riscv_vreinterpret_i8m8(src
);
1926 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_u64m1_u8m1
1927 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1928 // CHECK-RV64-NEXT: entry:
1929 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 8 x i8>
1930 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
1932 vuint8m1_t
test_vreinterpret_v_u64m1_u8m1(vuint64m1_t src
) {
1933 return __riscv_vreinterpret_u8m1(src
);
1936 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vreinterpret_v_u64m2_u8m2
1937 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1938 // CHECK-RV64-NEXT: entry:
1939 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 16 x i8>
1940 // CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
1942 vuint8m2_t
test_vreinterpret_v_u64m2_u8m2(vuint64m2_t src
) {
1943 return __riscv_vreinterpret_u8m2(src
);
1946 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vreinterpret_v_u64m4_u8m4
1947 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1948 // CHECK-RV64-NEXT: entry:
1949 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 32 x i8>
1950 // CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
1952 vuint8m4_t
test_vreinterpret_v_u64m4_u8m4(vuint64m4_t src
) {
1953 return __riscv_vreinterpret_u8m4(src
);
1956 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vreinterpret_v_u64m8_u8m8
1957 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1958 // CHECK-RV64-NEXT: entry:
1959 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 64 x i8>
1960 // CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
1962 vuint8m8_t
test_vreinterpret_v_u64m8_u8m8(vuint64m8_t src
) {
1963 return __riscv_vreinterpret_u8m8(src
);
1966 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_i64m1_i16m1
1967 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1968 // CHECK-RV64-NEXT: entry:
1969 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 4 x i16>
1970 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
1972 vint16m1_t
test_vreinterpret_v_i64m1_i16m1(vint64m1_t src
) {
1973 return __riscv_vreinterpret_i16m1(src
);
1976 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_i64m2_i16m2
1977 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1978 // CHECK-RV64-NEXT: entry:
1979 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 8 x i16>
1980 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
1982 vint16m2_t
test_vreinterpret_v_i64m2_i16m2(vint64m2_t src
) {
1983 return __riscv_vreinterpret_i16m2(src
);
1986 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_i64m4_i16m4
1987 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1988 // CHECK-RV64-NEXT: entry:
1989 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 16 x i16>
1990 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
1992 vint16m4_t
test_vreinterpret_v_i64m4_i16m4(vint64m4_t src
) {
1993 return __riscv_vreinterpret_i16m4(src
);
1996 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_i64m8_i16m8
1997 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
1998 // CHECK-RV64-NEXT: entry:
1999 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 32 x i16>
2000 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
2002 vint16m8_t
test_vreinterpret_v_i64m8_i16m8(vint64m8_t src
) {
2003 return __riscv_vreinterpret_i16m8(src
);
2006 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_u64m1_u16m1
2007 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2008 // CHECK-RV64-NEXT: entry:
2009 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 4 x i16>
2010 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
2012 vuint16m1_t
test_vreinterpret_v_u64m1_u16m1(vuint64m1_t src
) {
2013 return __riscv_vreinterpret_u16m1(src
);
2016 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vreinterpret_v_u64m2_u16m2
2017 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2018 // CHECK-RV64-NEXT: entry:
2019 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 8 x i16>
2020 // CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
2022 vuint16m2_t
test_vreinterpret_v_u64m2_u16m2(vuint64m2_t src
) {
2023 return __riscv_vreinterpret_u16m2(src
);
2026 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vreinterpret_v_u64m4_u16m4
2027 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2028 // CHECK-RV64-NEXT: entry:
2029 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 16 x i16>
2030 // CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
2032 vuint16m4_t
test_vreinterpret_v_u64m4_u16m4(vuint64m4_t src
) {
2033 return __riscv_vreinterpret_u16m4(src
);
2036 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vreinterpret_v_u64m8_u16m8
2037 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2038 // CHECK-RV64-NEXT: entry:
2039 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 32 x i16>
2040 // CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
2042 vuint16m8_t
test_vreinterpret_v_u64m8_u16m8(vuint64m8_t src
) {
2043 return __riscv_vreinterpret_u16m8(src
);
2046 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_i64m1_i32m1
2047 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2048 // CHECK-RV64-NEXT: entry:
2049 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 2 x i32>
2050 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
2052 vint32m1_t
test_vreinterpret_v_i64m1_i32m1(vint64m1_t src
) {
2053 return __riscv_vreinterpret_i32m1(src
);
2056 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_i64m2_i32m2
2057 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2058 // CHECK-RV64-NEXT: entry:
2059 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 4 x i32>
2060 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
2062 vint32m2_t
test_vreinterpret_v_i64m2_i32m2(vint64m2_t src
) {
2063 return __riscv_vreinterpret_i32m2(src
);
2066 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_i64m4_i32m4
2067 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2068 // CHECK-RV64-NEXT: entry:
2069 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 8 x i32>
2070 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
2072 vint32m4_t
test_vreinterpret_v_i64m4_i32m4(vint64m4_t src
) {
2073 return __riscv_vreinterpret_i32m4(src
);
2076 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_i64m8_i32m8
2077 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2078 // CHECK-RV64-NEXT: entry:
2079 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 16 x i32>
2080 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
2082 vint32m8_t
test_vreinterpret_v_i64m8_i32m8(vint64m8_t src
) {
2083 return __riscv_vreinterpret_i32m8(src
);
2086 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_u64m1_u32m1
2087 // CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2088 // CHECK-RV64-NEXT: entry:
2089 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 2 x i32>
2090 // CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
2092 vuint32m1_t
test_vreinterpret_v_u64m1_u32m1(vuint64m1_t src
) {
2093 return __riscv_vreinterpret_u32m1(src
);
2096 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vreinterpret_v_u64m2_u32m2
2097 // CHECK-RV64-SAME: (<vscale x 2 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2098 // CHECK-RV64-NEXT: entry:
2099 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC]] to <vscale x 4 x i32>
2100 // CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
2102 vuint32m2_t
test_vreinterpret_v_u64m2_u32m2(vuint64m2_t src
) {
2103 return __riscv_vreinterpret_u32m2(src
);
2106 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vreinterpret_v_u64m4_u32m4
2107 // CHECK-RV64-SAME: (<vscale x 4 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2108 // CHECK-RV64-NEXT: entry:
2109 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC]] to <vscale x 8 x i32>
2110 // CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
2112 vuint32m4_t
test_vreinterpret_v_u64m4_u32m4(vuint64m4_t src
) {
2113 return __riscv_vreinterpret_u32m4(src
);
2116 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vreinterpret_v_u64m8_u32m8
2117 // CHECK-RV64-SAME: (<vscale x 8 x i64> [[SRC:%.*]]) #[[ATTR0]] {
2118 // CHECK-RV64-NEXT: entry:
2119 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC]] to <vscale x 16 x i32>
2120 // CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
2122 vuint32m8_t
test_vreinterpret_v_u64m8_u32m8(vuint64m8_t src
) {
2123 return __riscv_vreinterpret_u32m8(src
);
2126 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i8m1_b64
2127 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2128 // CHECK-RV64-NEXT: entry:
2129 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2130 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2131 // CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
2133 vbool64_t
test_vreinterpret_v_i8m1_b64(vint8m1_t src
) {
2134 return __riscv_vreinterpret_b64(src
);
2137 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b64_i8m1
2138 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2139 // CHECK-RV64-NEXT: entry:
2140 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
2141 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2142 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2144 vint8m1_t
test_vreinterpret_v_b64_i8m1(vbool64_t src
) {
2145 return __riscv_vreinterpret_i8m1(src
);
2148 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i8m1_b32
2149 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2150 // CHECK-RV64-NEXT: entry:
2151 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2152 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2153 // CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
2155 vbool32_t
test_vreinterpret_v_i8m1_b32(vint8m1_t src
) {
2156 return __riscv_vreinterpret_b32(src
);
2159 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b32_i8m1
2160 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2161 // CHECK-RV64-NEXT: entry:
2162 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
2163 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2164 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2166 vint8m1_t
test_vreinterpret_v_b32_i8m1(vbool32_t src
) {
2167 return __riscv_vreinterpret_i8m1(src
);
2170 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i8m1_b16
2171 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2172 // CHECK-RV64-NEXT: entry:
2173 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2174 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2175 // CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
2177 vbool16_t
test_vreinterpret_v_i8m1_b16(vint8m1_t src
) {
2178 return __riscv_vreinterpret_b16(src
);
2181 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b16_i8m1
2182 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2183 // CHECK-RV64-NEXT: entry:
2184 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
2185 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2186 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2188 vint8m1_t
test_vreinterpret_v_b16_i8m1(vbool16_t src
) {
2189 return __riscv_vreinterpret_i8m1(src
);
2192 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i8m1_b8
2193 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2194 // CHECK-RV64-NEXT: entry:
2195 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2196 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2197 // CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
2199 vbool8_t
test_vreinterpret_v_i8m1_b8(vint8m1_t src
) {
2200 return __riscv_vreinterpret_b8(src
);
2203 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b8_i8m1
2204 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2205 // CHECK-RV64-NEXT: entry:
2206 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
2207 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2208 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2210 vint8m1_t
test_vreinterpret_v_b8_i8m1(vbool8_t src
) {
2211 return __riscv_vreinterpret_i8m1(src
);
2214 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i8m1_b4
2215 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2216 // CHECK-RV64-NEXT: entry:
2217 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2218 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2219 // CHECK-RV64-NEXT: ret <vscale x 16 x i1> [[TMP1]]
2221 vbool4_t
test_vreinterpret_v_i8m1_b4(vint8m1_t src
) {
2222 return __riscv_vreinterpret_b4(src
);
2225 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b4_i8m1
2226 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2227 // CHECK-RV64-NEXT: entry:
2228 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
2229 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2230 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2232 vint8m1_t
test_vreinterpret_v_b4_i8m1(vbool4_t src
) {
2233 return __riscv_vreinterpret_i8m1(src
);
2236 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_i8m1_b2
2237 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2238 // CHECK-RV64-NEXT: entry:
2239 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2240 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2241 // CHECK-RV64-NEXT: ret <vscale x 32 x i1> [[TMP1]]
2243 vbool2_t
test_vreinterpret_v_i8m1_b2(vint8m1_t src
) {
2244 return __riscv_vreinterpret_b2(src
);
2247 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b2_i8m1
2248 // CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2249 // CHECK-RV64-NEXT: entry:
2250 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
2251 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2252 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2254 vint8m1_t
test_vreinterpret_v_b2_i8m1(vbool2_t src
) {
2255 return __riscv_vreinterpret_i8m1(src
);
2258 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i1> @test_vreinterpret_v_i8m1_b1
2259 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2260 // CHECK-RV64-NEXT: entry:
2261 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2262 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.vector.extract.nxv64i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2263 // CHECK-RV64-NEXT: ret <vscale x 64 x i1> [[TMP1]]
2265 vbool1_t
test_vreinterpret_v_i8m1_b1(vint8m1_t src
) {
2266 return __riscv_vreinterpret_b1(src
);
2269 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b1_i8m1
2270 // CHECK-RV64-SAME: (<vscale x 64 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2271 // CHECK-RV64-NEXT: entry:
2272 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv64i1(<vscale x 64 x i1> poison, <vscale x 64 x i1> [[SRC]], i64 0)
2273 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2274 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2276 vint8m1_t
test_vreinterpret_v_b1_i8m1(vbool1_t src
) {
2277 return __riscv_vreinterpret_i8m1(src
);
2280 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u8m1_b64
2281 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2282 // CHECK-RV64-NEXT: entry:
2283 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2284 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2285 // CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
2287 vbool64_t
test_vreinterpret_v_u8m1_b64(vuint8m1_t src
) {
2288 return __riscv_vreinterpret_b64(src
);
2291 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b64_u8m1
2292 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2293 // CHECK-RV64-NEXT: entry:
2294 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
2295 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2296 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2298 vuint8m1_t
test_vreinterpret_v_b64_u8m1(vbool64_t src
) {
2299 return __riscv_vreinterpret_u8m1(src
);
2302 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u8m1_b32
2303 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2304 // CHECK-RV64-NEXT: entry:
2305 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2306 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2307 // CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
2309 vbool32_t
test_vreinterpret_v_u8m1_b32(vuint8m1_t src
) {
2310 return __riscv_vreinterpret_b32(src
);
2313 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b32_u8m1
2314 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2315 // CHECK-RV64-NEXT: entry:
2316 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
2317 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2318 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2320 vuint8m1_t
test_vreinterpret_v_b32_u8m1(vbool32_t src
) {
2321 return __riscv_vreinterpret_u8m1(src
);
2324 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u8m1_b16
2325 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2326 // CHECK-RV64-NEXT: entry:
2327 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2328 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2329 // CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
2331 vbool16_t
test_vreinterpret_v_u8m1_b16(vuint8m1_t src
) {
2332 return __riscv_vreinterpret_b16(src
);
2335 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b16_u8m1
2336 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2337 // CHECK-RV64-NEXT: entry:
2338 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
2339 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2340 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2342 vuint8m1_t
test_vreinterpret_v_b16_u8m1(vbool16_t src
) {
2343 return __riscv_vreinterpret_u8m1(src
);
2346 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u8m1_b8
2347 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2348 // CHECK-RV64-NEXT: entry:
2349 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2350 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2351 // CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
2353 vbool8_t
test_vreinterpret_v_u8m1_b8(vuint8m1_t src
) {
2354 return __riscv_vreinterpret_b8(src
);
2357 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b8_u8m1
2358 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2359 // CHECK-RV64-NEXT: entry:
2360 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
2361 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2362 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2364 vuint8m1_t
test_vreinterpret_v_b8_u8m1(vbool8_t src
) {
2365 return __riscv_vreinterpret_u8m1(src
);
2368 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u8m1_b4
2369 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2370 // CHECK-RV64-NEXT: entry:
2371 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2372 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2373 // CHECK-RV64-NEXT: ret <vscale x 16 x i1> [[TMP1]]
2375 vbool4_t
test_vreinterpret_v_u8m1_b4(vuint8m1_t src
) {
2376 return __riscv_vreinterpret_b4(src
);
2379 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b4_u8m1
2380 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2381 // CHECK-RV64-NEXT: entry:
2382 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
2383 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2384 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2386 vuint8m1_t
test_vreinterpret_v_b4_u8m1(vbool4_t src
) {
2387 return __riscv_vreinterpret_u8m1(src
);
2390 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_u8m1_b2
2391 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2392 // CHECK-RV64-NEXT: entry:
2393 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2394 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2395 // CHECK-RV64-NEXT: ret <vscale x 32 x i1> [[TMP1]]
2397 vbool2_t
test_vreinterpret_v_u8m1_b2(vuint8m1_t src
) {
2398 return __riscv_vreinterpret_b2(src
);
2401 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b2_u8m1
2402 // CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2403 // CHECK-RV64-NEXT: entry:
2404 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
2405 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2406 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2408 vuint8m1_t
test_vreinterpret_v_b2_u8m1(vbool2_t src
) {
2409 return __riscv_vreinterpret_u8m1(src
);
2412 // CHECK-RV64-LABEL: define dso_local <vscale x 64 x i1> @test_vreinterpret_v_u8m1_b1
2413 // CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
2414 // CHECK-RV64-NEXT: entry:
2415 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
2416 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.vector.extract.nxv64i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2417 // CHECK-RV64-NEXT: ret <vscale x 64 x i1> [[TMP1]]
2419 vbool1_t
test_vreinterpret_v_u8m1_b1(vuint8m1_t src
) {
2420 return __riscv_vreinterpret_b1(src
);
2423 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b1_u8m1
2424 // CHECK-RV64-SAME: (<vscale x 64 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2425 // CHECK-RV64-NEXT: entry:
2426 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv64i1(<vscale x 64 x i1> poison, <vscale x 64 x i1> [[SRC]], i64 0)
2427 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
2428 // CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP1]]
2430 vuint8m1_t
test_vreinterpret_v_b1_u8m1(vbool1_t src
) {
2431 return __riscv_vreinterpret_u8m1(src
);
2434 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i16m1_b64
2435 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2436 // CHECK-RV64-NEXT: entry:
2437 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2438 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2439 // CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
2441 vbool64_t
test_vreinterpret_v_i16m1_b64(vint16m1_t src
) {
2442 return __riscv_vreinterpret_b64(src
);
2445 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b64_i16m1
2446 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2447 // CHECK-RV64-NEXT: entry:
2448 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
2449 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2450 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2452 vint16m1_t
test_vreinterpret_v_b64_i16m1(vbool64_t src
) {
2453 return __riscv_vreinterpret_i16m1(src
);
2456 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i16m1_b32
2457 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2458 // CHECK-RV64-NEXT: entry:
2459 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2460 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2461 // CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
2463 vbool32_t
test_vreinterpret_v_i16m1_b32(vint16m1_t src
) {
2464 return __riscv_vreinterpret_b32(src
);
2467 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b32_i16m1
2468 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2469 // CHECK-RV64-NEXT: entry:
2470 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
2471 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2472 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2474 vint16m1_t
test_vreinterpret_v_b32_i16m1(vbool32_t src
) {
2475 return __riscv_vreinterpret_i16m1(src
);
2478 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i16m1_b16
2479 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2480 // CHECK-RV64-NEXT: entry:
2481 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2482 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2483 // CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
2485 vbool16_t
test_vreinterpret_v_i16m1_b16(vint16m1_t src
) {
2486 return __riscv_vreinterpret_b16(src
);
2489 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b16_i16m1
2490 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2491 // CHECK-RV64-NEXT: entry:
2492 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
2493 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2494 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2496 vint16m1_t
test_vreinterpret_v_b16_i16m1(vbool16_t src
) {
2497 return __riscv_vreinterpret_i16m1(src
);
2500 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i16m1_b8
2501 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2502 // CHECK-RV64-NEXT: entry:
2503 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2504 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2505 // CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
2507 vbool8_t
test_vreinterpret_v_i16m1_b8(vint16m1_t src
) {
2508 return __riscv_vreinterpret_b8(src
);
2511 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b8_i16m1
2512 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2513 // CHECK-RV64-NEXT: entry:
2514 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
2515 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2516 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2518 vint16m1_t
test_vreinterpret_v_b8_i16m1(vbool8_t src
) {
2519 return __riscv_vreinterpret_i16m1(src
);
2522 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i16m1_b4
2523 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2524 // CHECK-RV64-NEXT: entry:
2525 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2526 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2527 // CHECK-RV64-NEXT: ret <vscale x 16 x i1> [[TMP1]]
2529 vbool4_t
test_vreinterpret_v_i16m1_b4(vint16m1_t src
) {
2530 return __riscv_vreinterpret_b4(src
);
2533 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b4_i16m1
2534 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2535 // CHECK-RV64-NEXT: entry:
2536 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
2537 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2538 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2540 vint16m1_t
test_vreinterpret_v_b4_i16m1(vbool4_t src
) {
2541 return __riscv_vreinterpret_i16m1(src
);
2544 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_i16m1_b2
2545 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2546 // CHECK-RV64-NEXT: entry:
2547 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2548 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2549 // CHECK-RV64-NEXT: ret <vscale x 32 x i1> [[TMP1]]
2551 vbool2_t
test_vreinterpret_v_i16m1_b2(vint16m1_t src
) {
2552 return __riscv_vreinterpret_b2(src
);
2555 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b2_i16m1
2556 // CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2557 // CHECK-RV64-NEXT: entry:
2558 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
2559 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2560 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2562 vint16m1_t
test_vreinterpret_v_b2_i16m1(vbool2_t src
) {
2563 return __riscv_vreinterpret_i16m1(src
);
2566 // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u16m1_b64
2567 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2568 // CHECK-RV64-NEXT: entry:
2569 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2570 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2571 // CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
2573 vbool64_t
test_vreinterpret_v_u16m1_b64(vuint16m1_t src
) {
2574 return __riscv_vreinterpret_b64(src
);
2577 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b64_u16m1
2578 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2579 // CHECK-RV64-NEXT: entry:
2580 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
2581 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2582 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2584 vuint16m1_t
test_vreinterpret_v_b64_u16m1(vbool64_t src
) {
2585 return __riscv_vreinterpret_u16m1(src
);
2588 // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u16m1_b32
2589 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2590 // CHECK-RV64-NEXT: entry:
2591 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2592 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2593 // CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
2595 vbool32_t
test_vreinterpret_v_u16m1_b32(vuint16m1_t src
) {
2596 return __riscv_vreinterpret_b32(src
);
2599 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b32_u16m1
2600 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2601 // CHECK-RV64-NEXT: entry:
2602 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
2603 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2604 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2606 vuint16m1_t
test_vreinterpret_v_b32_u16m1(vbool32_t src
) {
2607 return __riscv_vreinterpret_u16m1(src
);
2610 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u16m1_b16
2611 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2612 // CHECK-RV64-NEXT: entry:
2613 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2614 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2615 // CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
2617 vbool16_t
test_vreinterpret_v_u16m1_b16(vuint16m1_t src
) {
2618 return __riscv_vreinterpret_b16(src
);
2621 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b16_u16m1
2622 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2623 // CHECK-RV64-NEXT: entry:
2624 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
2625 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2626 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2628 vuint16m1_t
test_vreinterpret_v_b16_u16m1(vbool16_t src
) {
2629 return __riscv_vreinterpret_u16m1(src
);
2632 // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u16m1_b8
2633 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2634 // CHECK-RV64-NEXT: entry:
2635 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2636 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2637 // CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
2639 vbool8_t
test_vreinterpret_v_u16m1_b8(vuint16m1_t src
) {
2640 return __riscv_vreinterpret_b8(src
);
2643 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b8_u16m1
2644 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2645 // CHECK-RV64-NEXT: entry:
2646 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
2647 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2648 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2650 vuint16m1_t
test_vreinterpret_v_b8_u16m1(vbool8_t src
) {
2651 return __riscv_vreinterpret_u16m1(src
);
2654 // CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u16m1_b4
2655 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2656 // CHECK-RV64-NEXT: entry:
2657 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2658 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2659 // CHECK-RV64-NEXT: ret <vscale x 16 x i1> [[TMP1]]
2661 vbool4_t
test_vreinterpret_v_u16m1_b4(vuint16m1_t src
) {
2662 return __riscv_vreinterpret_b4(src
);
2665 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b4_u16m1
2666 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2667 // CHECK-RV64-NEXT: entry:
2668 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
2669 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2670 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2672 vuint16m1_t
test_vreinterpret_v_b4_u16m1(vbool4_t src
) {
2673 return __riscv_vreinterpret_u16m1(src
);
2676 // CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_u16m1_b2
2677 // CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
2678 // CHECK-RV64-NEXT: entry:
2679 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
2680 // CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
2681 // CHECK-RV64-NEXT: ret <vscale x 32 x i1> [[TMP1]]
2683 vbool2_t
test_vreinterpret_v_u16m1_b2(vuint16m1_t src
) {
2684 return __riscv_vreinterpret_b2(src
);
2687 // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b2_u16m1
2688 // CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
2689 // CHECK-RV64-NEXT: entry:
2690 // CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
2691 // CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
2692 // CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP1]]
2694 vuint16m1_t
test_vreinterpret_v_b2_u16m1(vbool2_t src
) {
2695 return __riscv_vreinterpret_u16m1(src
);
// --- i32m1 <-> mask (vboolN_t) reinterpret pairs -------------------------
// NOTE(review): CHECK lines are autogenerated (update_cc_test_checks.py).
// Vector -> mask: bitcast to <vscale x 64 x i1> + llvm.vector.extract of the
// low mask fragment. Mask -> vector: llvm.vector.insert into a poison
// <vscale x 64 x i1> + bitcast back. Bits are unchanged in all cases.
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i32m1_b64
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
//
vbool64_t test_vreinterpret_v_i32m1_b64(vint32m1_t src) {
  return __riscv_vreinterpret_b64(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b64_i32m1
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vint32m1_t test_vreinterpret_v_b64_i32m1(vbool64_t src) {
  return __riscv_vreinterpret_i32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i32m1_b32
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
//
vbool32_t test_vreinterpret_v_i32m1_b32(vint32m1_t src) {
  return __riscv_vreinterpret_b32(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b32_i32m1
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vint32m1_t test_vreinterpret_v_b32_i32m1(vbool32_t src) {
  return __riscv_vreinterpret_i32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i32m1_b16
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
//
vbool16_t test_vreinterpret_v_i32m1_b16(vint32m1_t src) {
  return __riscv_vreinterpret_b16(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b16_i32m1
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vint32m1_t test_vreinterpret_v_b16_i32m1(vbool16_t src) {
  return __riscv_vreinterpret_i32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i32m1_b8
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
//
vbool8_t test_vreinterpret_v_i32m1_b8(vint32m1_t src) {
  return __riscv_vreinterpret_b8(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b8_i32m1
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vint32m1_t test_vreinterpret_v_b8_i32m1(vbool8_t src) {
  return __riscv_vreinterpret_i32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i32m1_b4
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 16 x i1> [[TMP1]]
//
vbool4_t test_vreinterpret_v_i32m1_b4(vint32m1_t src) {
  return __riscv_vreinterpret_b4(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b4_i32m1
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vint32m1_t test_vreinterpret_v_b4_i32m1(vbool4_t src) {
  return __riscv_vreinterpret_i32m1(src);
}
// --- u32m1 <-> mask (vboolN_t) reinterpret pairs -------------------------
// NOTE(review): CHECK lines are autogenerated (update_cc_test_checks.py).
// Same lowering as the signed i32m1 cases: extract (vector -> mask) or
// insert-into-poison + bitcast (mask -> vector); signedness does not change
// the emitted IR, only the C-level types.
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u32m1_b64
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
//
vbool64_t test_vreinterpret_v_u32m1_b64(vuint32m1_t src) {
  return __riscv_vreinterpret_b64(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b64_u32m1
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vuint32m1_t test_vreinterpret_v_b64_u32m1(vbool64_t src) {
  return __riscv_vreinterpret_u32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u32m1_b32
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
//
vbool32_t test_vreinterpret_v_u32m1_b32(vuint32m1_t src) {
  return __riscv_vreinterpret_b32(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b32_u32m1
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vuint32m1_t test_vreinterpret_v_b32_u32m1(vbool32_t src) {
  return __riscv_vreinterpret_u32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u32m1_b16
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
//
vbool16_t test_vreinterpret_v_u32m1_b16(vuint32m1_t src) {
  return __riscv_vreinterpret_b16(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b16_u32m1
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vuint32m1_t test_vreinterpret_v_b16_u32m1(vbool16_t src) {
  return __riscv_vreinterpret_u32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u32m1_b8
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
//
vbool8_t test_vreinterpret_v_u32m1_b8(vuint32m1_t src) {
  return __riscv_vreinterpret_b8(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b8_u32m1
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vuint32m1_t test_vreinterpret_v_b8_u32m1(vbool8_t src) {
  return __riscv_vreinterpret_u32m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u32m1_b4
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 16 x i1> [[TMP1]]
//
vbool4_t test_vreinterpret_v_u32m1_b4(vuint32m1_t src) {
  return __riscv_vreinterpret_b4(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b4_u32m1
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
//
vuint32m1_t test_vreinterpret_v_b4_u32m1(vbool4_t src) {
  return __riscv_vreinterpret_u32m1(src);
}
// --- i64m1 <-> mask (vboolN_t) reinterpret pairs -------------------------
// NOTE(review): CHECK lines are autogenerated (update_cc_test_checks.py).
// i64m1 is <vscale x 1 x i64>; masks b64/b32/b16/b8 map to
// <vscale x 1/2/4/8 x i1>. Lowering is bitcast + extract (vector -> mask) or
// insert-into-poison + bitcast (mask -> vector); bits are unchanged.
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i64m1_b64
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
//
vbool64_t test_vreinterpret_v_i64m1_b64(vint64m1_t src) {
  return __riscv_vreinterpret_b64(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b64_i64m1
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vint64m1_t test_vreinterpret_v_b64_i64m1(vbool64_t src) {
  return __riscv_vreinterpret_i64m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i64m1_b32
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
//
vbool32_t test_vreinterpret_v_i64m1_b32(vint64m1_t src) {
  return __riscv_vreinterpret_b32(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b32_i64m1
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vint64m1_t test_vreinterpret_v_b32_i64m1(vbool32_t src) {
  return __riscv_vreinterpret_i64m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i64m1_b16
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
//
vbool16_t test_vreinterpret_v_i64m1_b16(vint64m1_t src) {
  return __riscv_vreinterpret_b16(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b16_i64m1
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vint64m1_t test_vreinterpret_v_b16_i64m1(vbool16_t src) {
  return __riscv_vreinterpret_i64m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i64m1_b8
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
//
vbool8_t test_vreinterpret_v_i64m1_b8(vint64m1_t src) {
  return __riscv_vreinterpret_b8(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b8_i64m1
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vint64m1_t test_vreinterpret_v_b8_i64m1(vbool8_t src) {
  return __riscv_vreinterpret_i64m1(src);
}
// --- u64m1 <-> mask (vboolN_t) reinterpret pairs -------------------------
// NOTE(review): CHECK lines are autogenerated (update_cc_test_checks.py).
// Same lowering as the signed i64m1 cases; only the C-level element
// signedness differs, the emitted IR is identical in shape.
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u64m1_b64
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 1 x i1> [[TMP1]]
//
vbool64_t test_vreinterpret_v_u64m1_b64(vuint64m1_t src) {
  return __riscv_vreinterpret_b64(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b64_u64m1
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vuint64m1_t test_vreinterpret_v_b64_u64m1(vbool64_t src) {
  return __riscv_vreinterpret_u64m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u64m1_b32
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 2 x i1> [[TMP1]]
//
vbool32_t test_vreinterpret_v_u64m1_b32(vuint64m1_t src) {
  return __riscv_vreinterpret_b32(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b32_u64m1
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vuint64m1_t test_vreinterpret_v_b32_u64m1(vbool32_t src) {
  return __riscv_vreinterpret_u64m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u64m1_b16
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 4 x i1> [[TMP1]]
//
vbool16_t test_vreinterpret_v_u64m1_b16(vuint64m1_t src) {
  return __riscv_vreinterpret_b16(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b16_u64m1
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vuint64m1_t test_vreinterpret_v_b16_u64m1(vbool16_t src) {
  return __riscv_vreinterpret_u64m1(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u64m1_b8
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
// CHECK-RV64-NEXT: ret <vscale x 8 x i1> [[TMP1]]
//
vbool8_t test_vreinterpret_v_u64m1_b8(vuint64m1_t src) {
  return __riscv_vreinterpret_b8(src);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b8_u64m1
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
// CHECK-RV64-NEXT: [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
//
vuint64m1_t test_vreinterpret_v_b8_u64m1(vbool8_t src) {
  return __riscv_vreinterpret_u64m1(src);
}