; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc --mtriple=loongarch64 --mattr=+lasx < %s | FileCheck %s
;; SETEQ: xvseqi.{b/h/w/d} for splat-immediate RHS, xvseq.{b/h/w/d} for vector RHS.
define void @v32i8_icmp_eq_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v32i8_icmp_eq_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvseqi.b $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %cmp = icmp eq <32 x i8> %v0, <i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15>
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v32i8_icmp_eq(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_eq:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.b $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp eq <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_eq_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v16i16_icmp_eq_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvseqi.h $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %cmp = icmp eq <16 x i16> %v0, <i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15>
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_eq(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_eq:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.h $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp eq <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_eq_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v8i32_icmp_eq_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvseqi.w $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %cmp = icmp eq <8 x i32> %v0, <i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_eq(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_eq:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.w $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp eq <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_eq_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v4i64_icmp_eq_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvseqi.d $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %cmp = icmp eq <4 x i64> %v0, <i64 15, i64 15, i64 15, i64 15>
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_eq(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_eq:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.d $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp eq <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETLE (signed): xvslei.{b/h/w/d} for splat-immediate RHS, xvsle.{b/h/w/d} for vector RHS.
define void @v32i8_icmp_sle_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v32i8_icmp_sle_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.b $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %cmp = icmp sle <32 x i8> %v0, <i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15>
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v32i8_icmp_sle(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_sle:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.b $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp sle <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_sle_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v16i16_icmp_sle_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.h $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %cmp = icmp sle <16 x i16> %v0, <i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15>
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_sle(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_sle:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.h $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp sle <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_sle_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v8i32_icmp_sle_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.w $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %cmp = icmp sle <8 x i32> %v0, <i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_sle(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_sle:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.w $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp sle <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_sle_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v4i64_icmp_sle_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.d $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %cmp = icmp sle <4 x i64> %v0, <i64 15, i64 15, i64 15, i64 15>
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_sle(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_sle:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.d $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp sle <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETULE (unsigned): xvslei.{b/h/w/d}u for splat-immediate RHS, xvsle.{b/h/w/d}u for vector RHS.
define void @v32i8_icmp_ule_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v32i8_icmp_ule_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.bu $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %cmp = icmp ule <32 x i8> %v0, <i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31>
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v32i8_icmp_ule(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_ule:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.bu $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp ule <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_ule_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v16i16_icmp_ule_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.hu $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %cmp = icmp ule <16 x i16> %v0, <i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31>
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_ule(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_ule:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.hu $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp ule <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_ule_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v8i32_icmp_ule_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.wu $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %cmp = icmp ule <8 x i32> %v0, <i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_ule(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_ule:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.wu $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp ule <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_ule_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v4i64_icmp_ule_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslei.du $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %cmp = icmp ule <4 x i64> %v0, <i64 31, i64 31, i64 31, i64 31>
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_ule(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_ule:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.du $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp ule <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETLT (signed): xvslti.{b/h/w/d} for splat-immediate RHS, xvslt.{b/h/w/d} for vector RHS.
define void @v32i8_icmp_slt_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v32i8_icmp_slt_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.b $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %cmp = icmp slt <32 x i8> %v0, <i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15, i8 15>
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v32i8_icmp_slt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_slt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.b $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp slt <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_slt_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v16i16_icmp_slt_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.h $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %cmp = icmp slt <16 x i16> %v0, <i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15>
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_slt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_slt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.h $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp slt <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_slt_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v8i32_icmp_slt_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.w $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %cmp = icmp slt <8 x i32> %v0, <i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_slt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_slt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.w $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp slt <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_slt_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v4i64_icmp_slt_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.d $xr0, $xr0, 15
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %cmp = icmp slt <4 x i64> %v0, <i64 15, i64 15, i64 15, i64 15>
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_slt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_slt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.d $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp slt <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETULT (unsigned): xvslti.{b/h/w/d}u for splat-immediate RHS, xvslt.{b/h/w/d}u for vector RHS.
define void @v32i8_icmp_ult_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v32i8_icmp_ult_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.bu $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %cmp = icmp ult <32 x i8> %v0, <i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31, i8 31>
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v32i8_icmp_ult(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_ult:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.bu $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp ult <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_ult_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v16i16_icmp_ult_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.hu $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %cmp = icmp ult <16 x i16> %v0, <i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31, i16 31>
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_ult(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_ult:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.hu $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp ult <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_ult_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v8i32_icmp_ult_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.wu $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %cmp = icmp ult <8 x i32> %v0, <i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_ult(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_ult:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.wu $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp ult <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_ult_imm(ptr %res, ptr %a0) nounwind {
; CHECK-LABEL: v4i64_icmp_ult_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvslti.du $xr0, $xr0, 31
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %cmp = icmp ult <4 x i64> %v0, <i64 31, i64 31, i64 31, i64 31>
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_ult(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_ult:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.du $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp ult <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETNE: no native vector "not-equal", so codegen emits xvseq + a NOT
;; (xvxori.b with 255 for i8; xvrepli.b -1 + xvxor.v for wider elements).
define void @v32i8_icmp_ne(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_ne:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.b $xr0, $xr0, $xr1
; CHECK-NEXT:    xvxori.b $xr0, $xr0, 255
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp ne <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_ne(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_ne:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.h $xr0, $xr0, $xr1
; CHECK-NEXT:    xvrepli.b $xr1, -1
; CHECK-NEXT:    xvxor.v $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp ne <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_ne(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_ne:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.w $xr0, $xr0, $xr1
; CHECK-NEXT:    xvrepli.b $xr1, -1
; CHECK-NEXT:    xvxor.v $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp ne <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_ne(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_ne:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvseq.d $xr0, $xr0, $xr1
; CHECK-NEXT:    xvrepli.b $xr1, -1
; CHECK-NEXT:    xvxor.v $xr0, $xr0, $xr1
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp ne <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETGE (signed): lowered as the swapped-operand SETLE, i.e. xvsle with LHS/RHS exchanged.
define void @v32i8_icmp_sge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_sge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.b $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp sge <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_sge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_sge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.h $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp sge <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_sge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_sge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.w $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp sge <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_sge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_sge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.d $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp sge <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETUGE (unsigned): lowered as the swapped-operand SETULE, i.e. xvsle.{b/h/w/d}u with LHS/RHS exchanged.
define void @v32i8_icmp_uge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_uge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.bu $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp uge <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_uge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_uge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.hu $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp uge <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_uge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_uge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.wu $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp uge <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_uge(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_uge:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvsle.du $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp uge <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETGT (signed): lowered as the swapped-operand SETLT, i.e. xvslt with LHS/RHS exchanged.
define void @v32i8_icmp_sgt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_sgt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.b $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp sgt <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_sgt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_sgt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.h $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp sgt <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_sgt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_sgt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.w $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp sgt <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}

define void @v4i64_icmp_sgt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v4i64_icmp_sgt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.d $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <4 x i64>, ptr %a0
  %v1 = load <4 x i64>, ptr %a1
  %cmp = icmp sgt <4 x i64> %v0, %v1
  %ext = sext <4 x i1> %cmp to <4 x i64>
  store <4 x i64> %ext, ptr %res
  ret void
}
;; SETUGT (unsigned): lowered as the swapped-operand SETULT, i.e. xvslt.{b/h/w/d}u with LHS/RHS exchanged.
define void @v32i8_icmp_ugt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v32i8_icmp_ugt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.bu $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <32 x i8>, ptr %a0
  %v1 = load <32 x i8>, ptr %a1
  %cmp = icmp ugt <32 x i8> %v0, %v1
  %ext = sext <32 x i1> %cmp to <32 x i8>
  store <32 x i8> %ext, ptr %res
  ret void
}

define void @v16i16_icmp_ugt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v16i16_icmp_ugt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.hu $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <16 x i16>, ptr %a0
  %v1 = load <16 x i16>, ptr %a1
  %cmp = icmp ugt <16 x i16> %v0, %v1
  %ext = sext <16 x i1> %cmp to <16 x i16>
  store <16 x i16> %ext, ptr %res
  ret void
}

define void @v8i32_icmp_ugt(ptr %res, ptr %a0, ptr %a1) nounwind {
; CHECK-LABEL: v8i32_icmp_ugt:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xvld $xr0, $a1, 0
; CHECK-NEXT:    xvld $xr1, $a2, 0
; CHECK-NEXT:    xvslt.wu $xr0, $xr1, $xr0
; CHECK-NEXT:    xvst $xr0, $a0, 0
; CHECK-NEXT:    ret
  %v0 = load <8 x i32>, ptr %a0
  %v1 = load <8 x i32>, ptr %a1
  %cmp = icmp ugt <8 x i32> %v0, %v1
  %ext = sext <8 x i1> %cmp to <8 x i32>
  store <8 x i32> %ext, ptr %res
  ret void
}
925 define void @v4i64_icmp_ugt(ptr %res, ptr %a0, ptr %a1) nounwind {
926 ; CHECK-LABEL: v4i64_icmp_ugt:
928 ; CHECK-NEXT: xvld $xr0, $a1, 0
929 ; CHECK-NEXT: xvld $xr1, $a2, 0
930 ; CHECK-NEXT: xvslt.du $xr0, $xr1, $xr0
931 ; CHECK-NEXT: xvst $xr0, $a0, 0
933 %v0 = load <4 x i64>, ptr %a0
934 %v1 = load <4 x i64>, ptr %a1
935 %cmp = icmp ugt <4 x i64> %v0, %v1
936 %ext = sext <4 x i1> %cmp to <4 x i64>
937 store <4 x i64> %ext, ptr %res