; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64,SSE
; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx | FileCheck %s --check-prefixes=X64,AVX,AVX1
; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx2 | FileCheck %s --check-prefixes=X64,AVX,AVX2
; RUN: llc < %s -mtriple=i686 -mattr=cmov | FileCheck %s --check-prefix=X86

declare i8 @llvm.umin.i8(i8, i8)
declare i16 @llvm.umin.i16(i16, i16)
declare i24 @llvm.umin.i24(i24, i24)
declare i32 @llvm.umin.i32(i32, i32)
declare i64 @llvm.umin.i64(i64, i64)
declare i128 @llvm.umin.i128(i128, i128)

declare <1 x i32> @llvm.umin.v1i32(<1 x i32>, <1 x i32>)
declare <2 x i32> @llvm.umin.v2i32(<2 x i32>, <2 x i32>)
declare <3 x i32> @llvm.umin.v3i32(<3 x i32>, <3 x i32>)
declare <4 x i32> @llvm.umin.v4i32(<4 x i32>, <4 x i32>)
declare <8 x i32> @llvm.umin.v8i32(<8 x i32>, <8 x i32>)

declare <8 x i16> @llvm.umin.v8i16(<8 x i16>, <8 x i16>)
declare <16 x i8> @llvm.umin.v16i8(<16 x i8>, <16 x i8>)

define i8 @test_i8(i8 %a, i8 %b) nounwind {
; X64-LABEL: test_i8:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpb %al, %dil
; X64-NEXT: cmovbl %edi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X86-LABEL: test_i8:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpb %al, %cl
; X86-NEXT: cmovbl %ecx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
  %r = call i8 @llvm.umin.i8(i8 %a, i8 %b)
  ret i8 %r
}

define i16 @test_i16(i16 %a, i16 %b) nounwind {
; X64-LABEL: test_i16:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpw %ax, %di
; X64-NEXT: cmovbl %edi, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X86-LABEL: test_i16:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpw %ax, %cx
; X86-NEXT: cmovbl %ecx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
  %r = call i16 @llvm.umin.i16(i16 %a, i16 %b)
  ret i16 %r
}

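; i24 is not a legal scalar type; it is promoted, and the checks below show
; both operands being masked to 24 bits (0xFFFFFF) before the unsigned compare.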
define i24 @test_i24(i24 %a, i24 %b) nounwind {
; X64-LABEL: test_i24:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: andl $16777215, %esi # imm = 0xFFFFFF
; X64-NEXT: andl $16777215, %eax # imm = 0xFFFFFF
; X64-NEXT: cmpl %esi, %eax
; X64-NEXT: cmovael %esi, %eax
; X86-LABEL: test_i24:
; X86-NEXT: movl $16777215, %eax # imm = 0xFFFFFF
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: andl %eax, %ecx
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovael %ecx, %eax
  %r = call i24 @llvm.umin.i24(i24 %a, i24 %b)
  ret i24 %r
}

define i32 @test_i32(i32 %a, i32 %b) nounwind {
; X64-LABEL: test_i32:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpl %esi, %edi
; X64-NEXT: cmovbl %edi, %eax
; X86-LABEL: test_i32:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovbl %ecx, %eax
  %r = call i32 @llvm.umin.i32(i32 %a, i32 %b)
  ret i32 %r
}

define i64 @test_i64(i64 %a, i64 %b) nounwind {
; X64-LABEL: test_i64:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: cmpq %rsi, %rdi
; X64-NEXT: cmovbq %rdi, %rax
; X86-LABEL: test_i64:
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: movl %esi, %edi
; X86-NEXT: sbbl %edx, %edi
; X86-NEXT: cmovbl %ecx, %eax
; X86-NEXT: cmovbl %esi, %edx
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
  %r = call i64 @llvm.umin.i64(i64 %a, i64 %b)
  ret i64 %r
}

define i128 @test_i128(i128 %a, i128 %b) nounwind {
; X64-LABEL: test_i128:
; X64-NEXT: movq %rdx, %rax
; X64-NEXT: cmpq %rdx, %rdi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: sbbq %rcx, %rdx
; X64-NEXT: cmovbq %rdi, %rax
; X64-NEXT: cmovbq %rsi, %rcx
; X64-NEXT: movq %rcx, %rdx
; X86-LABEL: test_i128:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: cmpl %edx, %ebx
; X86-NEXT: sbbl %esi, %ebp
; X86-NEXT: movl %eax, %ebp
; X86-NEXT: sbbl %ecx, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, %eax
; X86-NEXT: sbbl %ebp, %eax
; X86-NEXT: cmovbl %ebx, %edx
; X86-NEXT: cmovbl {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmovbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmovbl %edi, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %ebp, 12(%eax)
; X86-NEXT: movl %ecx, 8(%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edx, (%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
  %r = call i128 @llvm.umin.i128(i128 %a, i128 %b)
  ret i128 %r
}

define <1 x i32> @test_v1i32(<1 x i32> %a, <1 x i32> %b) nounwind {
; X64-LABEL: test_v1i32:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpl %esi, %edi
; X64-NEXT: cmovbl %edi, %eax
; X86-LABEL: test_v1i32:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovbl %ecx, %eax
  %r = call <1 x i32> @llvm.umin.v1i32(<1 x i32> %a, <1 x i32> %b)
  ret <1 x i32> %r
}

define <2 x i32> @test_v2i32(<2 x i32> %a, <2 x i32> %b) nounwind {
; SSE-LABEL: test_v2i32:
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE-NEXT: movdqa %xmm0, %xmm3
; SSE-NEXT: pxor %xmm2, %xmm3
; SSE-NEXT: pxor %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm3, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; AVX-LABEL: test_v2i32:
; AVX-NEXT: vpminud %xmm1, %xmm0, %xmm0
; X86-LABEL: test_v2i32:
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmpl %eax, %esi
; X86-NEXT: cmovbl %esi, %eax
; X86-NEXT: cmpl %edx, %ecx
; X86-NEXT: cmovbl %ecx, %edx
; X86-NEXT: popl %esi
  %r = call <2 x i32> @llvm.umin.v2i32(<2 x i32> %a, <2 x i32> %b)
  ret <2 x i32> %r
}

define <3 x i32> @test_v3i32(<3 x i32> %a, <3 x i32> %b) nounwind {
; SSE-LABEL: test_v3i32:
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE-NEXT: movdqa %xmm0, %xmm3
; SSE-NEXT: pxor %xmm2, %xmm3
; SSE-NEXT: pxor %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm3, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; AVX-LABEL: test_v3i32:
; AVX-NEXT: vpminud %xmm1, %xmm0, %xmm0
; X86-LABEL: test_v3i32:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: cmpl %eax, %ebx
; X86-NEXT: cmovbl %ebx, %eax
; X86-NEXT: cmpl %edx, %edi
; X86-NEXT: cmovbl %edi, %edx
; X86-NEXT: cmpl %ecx, %esi
; X86-NEXT: cmovbl %esi, %ecx
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
  %r = call <3 x i32> @llvm.umin.v3i32(<3 x i32> %a, <3 x i32> %b)
  ret <3 x i32> %r
}

define <4 x i32> @test_v4i32(<4 x i32> %a, <4 x i32> %b) nounwind {
; SSE-LABEL: test_v4i32:
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE-NEXT: movdqa %xmm0, %xmm3
; SSE-NEXT: pxor %xmm2, %xmm3
; SSE-NEXT: pxor %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm3, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; AVX-LABEL: test_v4i32:
; AVX-NEXT: vpminud %xmm1, %xmm0, %xmm0
; X86-LABEL: test_v4i32:
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edi, %eax
; X86-NEXT: cmovbl %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %esi, %eax
; X86-NEXT: cmovbl %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovbl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
  %r = call <4 x i32> @llvm.umin.v4i32(<4 x i32> %a, <4 x i32> %b)
  ret <4 x i32> %r
}

define <8 x i32> @test_v8i32(<8 x i32> %a, <8 x i32> %b) nounwind {
; SSE-LABEL: test_v8i32:
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE-NEXT: movdqa %xmm0, %xmm5
; SSE-NEXT: pxor %xmm4, %xmm5
; SSE-NEXT: movdqa %xmm2, %xmm6
; SSE-NEXT: pxor %xmm4, %xmm6
; SSE-NEXT: pcmpgtd %xmm5, %xmm6
; SSE-NEXT: pand %xmm6, %xmm0
; SSE-NEXT: pandn %xmm2, %xmm6
; SSE-NEXT: por %xmm6, %xmm0
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pxor %xmm4, %xmm2
; SSE-NEXT: pxor %xmm3, %xmm4
; SSE-NEXT: pcmpgtd %xmm2, %xmm4
; SSE-NEXT: pand %xmm4, %xmm1
; SSE-NEXT: pandn %xmm3, %xmm4
; SSE-NEXT: por %xmm4, %xmm1
; AVX1-LABEL: test_v8i32:
; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
; AVX1-NEXT: vpminud %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpminud %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX2-LABEL: test_v8i32:
; AVX2-NEXT: vpminud %ymm1, %ymm0, %ymm0
; X86-LABEL: test_v8i32:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ebp, %eax
; X86-NEXT: cmovbl %eax, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ebx, %eax
; X86-NEXT: cmovbl %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edi, %eax
; X86-NEXT: cmovbl %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %esi, %eax
; X86-NEXT: cmovbl %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovbl %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovbl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovbl %ecx, %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %eax, 28(%ecx)
; X86-NEXT: movl %edx, 24(%ecx)
; X86-NEXT: movl (%esp), %eax # 4-byte Reload
; X86-NEXT: movl %eax, 20(%ecx)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
; X86-NEXT: movl %eax, 16(%ecx)
; X86-NEXT: movl %esi, 12(%ecx)
; X86-NEXT: movl %edi, 8(%ecx)
; X86-NEXT: movl %ebx, 4(%ecx)
; X86-NEXT: movl %ebp, (%ecx)
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
  %r = call <8 x i32> @llvm.umin.v8i32(<8 x i32> %a, <8 x i32> %b)
  ret <8 x i32> %r
}

define <8 x i16> @test_v8i16(<8 x i16> %a, <8 x i16> %b) nounwind {
; SSE-LABEL: test_v8i16:
; SSE-NEXT: movdqa %xmm0, %xmm2
; SSE-NEXT: psubusw %xmm1, %xmm2
; SSE-NEXT: psubw %xmm2, %xmm0
; AVX-LABEL: test_v8i16:
; AVX-NEXT: vpminuw %xmm1, %xmm0, %xmm0
; X86-LABEL: test_v8i16:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %bp, %ax
; X86-NEXT: cmovbl %eax, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %bx, %ax
; X86-NEXT: cmovbl %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %di, %ax
; X86-NEXT: cmovbl %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %si, %ax
; X86-NEXT: cmovbl %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %dx, %ax
; X86-NEXT: cmovbl %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %cx, %ax
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %dx, %ax
; X86-NEXT: cmovbl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpw %ax, %cx
; X86-NEXT: cmovbl %ecx, %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movw %ax, 14(%ecx)
; X86-NEXT: movw %dx, 12(%ecx)
; X86-NEXT: movl (%esp), %eax # 4-byte Reload
; X86-NEXT: movw %ax, 10(%ecx)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
; X86-NEXT: movw %ax, 8(%ecx)
; X86-NEXT: movw %si, 6(%ecx)
; X86-NEXT: movw %di, 4(%ecx)
; X86-NEXT: movw %bx, 2(%ecx)
; X86-NEXT: movw %bp, (%ecx)
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
  %r = call <8 x i16> @llvm.umin.v8i16(<8 x i16> %a, <8 x i16> %b)
  ret <8 x i16> %r
}

define <16 x i8> @test_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
; SSE-LABEL: test_v16i8:
; SSE-NEXT: pminub %xmm1, %xmm0
; AVX-LABEL: test_v16i8:
; AVX-NEXT: vpminub %xmm1, %xmm0, %xmm0
; X86-LABEL: test_v16i8:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $40, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %bl, %al
; X86-NEXT: cmovbl %eax, %ebx
; X86-NEXT: movl %ebx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %dl, %al
; X86-NEXT: cmovbl %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl %ecx, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %bl, %al
; X86-NEXT: cmovbl %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %dl, %al
; X86-NEXT: cmovbl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovbl %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb %cl, 15(%eax)
; X86-NEXT: movb %dl, 14(%eax)
; X86-NEXT: movb %bl, 13(%eax)
; X86-NEXT: movl %esi, %ecx
; X86-NEXT: movb %cl, 12(%eax)
; X86-NEXT: movl %edi, %ecx
; X86-NEXT: movb %cl, 11(%eax)
; X86-NEXT: movl %ebp, %ecx
; X86-NEXT: movb %cl, 10(%eax)
; X86-NEXT: movl (%esp), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 9(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 8(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 7(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 6(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 5(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 4(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 3(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 2(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 1(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, (%eax)
; X86-NEXT: addl $40, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
  %r = call <16 x i8> @llvm.umin.v16i8(<16 x i8> %a, <16 x i8> %b)
  ret <16 x i8> %r
}

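; The test_signbits_* cases pre-shift both operands with ashr, so the upper
; bits are known and the umin compare can be simplified; for example, the
; i128 case below needs only a single cmpq instead of the cmp/sbb pair used
; in test_i128 above.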
define i16 @test_signbits_i16(i16 %a, i16 %b) nounwind {
; X64-LABEL: test_signbits_i16:
; X64-NEXT: movswl %si, %eax
; X64-NEXT: movswl %di, %ecx
; X64-NEXT: shrl $15, %ecx
; X64-NEXT: shrl $8, %eax
; X64-NEXT: cmpw %ax, %cx
; X64-NEXT: cmovbl %ecx, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X86-LABEL: test_signbits_i16:
; X86-NEXT: movsbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movswl {{[0-9]+}}(%esp), %eax
; X86-NEXT: shrl $15, %eax
; X86-NEXT: cmpw %cx, %ax
; X86-NEXT: cmovael %ecx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
  %ax = ashr i16 %a, 15
  %bx = ashr i16 %b, 8
  %r = call i16 @llvm.umin.i16(i16 %ax, i16 %bx)
  ret i16 %r
}

define i32 @test_signbits_i32(i32 %a, i32 %b) nounwind {
; X64-LABEL: test_signbits_i32:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: sarl $16, %edi
; X64-NEXT: sarl $17, %eax
; X64-NEXT: cmpl %eax, %edi
; X64-NEXT: cmovbl %edi, %eax
; X86-LABEL: test_signbits_i32:
; X86-NEXT: movswl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: sarl $17, %eax
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovbl %ecx, %eax
  %ax = ashr i32 %a, 16
  %bx = ashr i32 %b, 17
  %r = call i32 @llvm.umin.i32(i32 %ax, i32 %bx)
  ret i32 %r
}

define i64 @test_signbits_i64(i64 %a, i64 %b) nounwind {
; X64-LABEL: test_signbits_i64:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: sarq $32, %rdi
; X64-NEXT: sarq $32, %rax
; X64-NEXT: cmpq %rax, %rdi
; X64-NEXT: cmovbq %rdi, %rax
; X86-LABEL: test_signbits_i64:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovbl %ecx, %eax
; X86-NEXT: movl %eax, %edx
; X86-NEXT: sarl $31, %edx
  %ax = ashr i64 %a, 32
  %bx = ashr i64 %b, 32
  %r = call i64 @llvm.umin.i64(i64 %ax, i64 %bx)
  ret i64 %r
}

define i128 @test_signbits_i128(i128 %a, i128 %b) nounwind {
; X64-LABEL: test_signbits_i128:
; X64-NEXT: movq %rcx, %rax
; X64-NEXT: sarq $28, %rax
; X64-NEXT: cmpq %rax, %rsi
; X64-NEXT: cmovbq %rsi, %rax
; X64-NEXT: movq %rax, %rdx
; X64-NEXT: sarq $63, %rdx
; X86-LABEL: test_signbits_i128:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: shrdl $28, %edi, %ecx
; X86-NEXT: sarl $28, %edi
; X86-NEXT: cmpl %ecx, %esi
; X86-NEXT: movl %edx, %ebx
; X86-NEXT: sbbl %edi, %ebx
; X86-NEXT: cmovbl %esi, %ecx
; X86-NEXT: cmovbl %edx, %edi
; X86-NEXT: movl %edi, 4(%eax)
; X86-NEXT: sarl $31, %edi
; X86-NEXT: movl %ecx, (%eax)
; X86-NEXT: movl %edi, 12(%eax)
; X86-NEXT: movl %edi, 8(%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
  %ax = ashr i128 %a, 64
  %bx = ashr i128 %b, 92
  %r = call i128 @llvm.umin.i128(i128 %ax, i128 %bx)
  ret i128 %r
}