; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64,SSE
; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx | FileCheck %s --check-prefixes=X64,AVX,AVX1
; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx2 | FileCheck %s --check-prefixes=X64,AVX,AVX2
; RUN: llc < %s -mtriple=i686 -mattr=cmov | FileCheck %s --check-prefix=X86
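
; Exercises lowering of the llvm.smin.* intrinsics on x86-64 (SSE, AVX and
; AVX2) and on i686 with cmov: scalar widths from i8 to i128, small integer
; vectors, and operands whose sign bits are already known.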
declare i8 @llvm.smin.i8(i8, i8)
declare i16 @llvm.smin.i16(i16, i16)
declare i24 @llvm.smin.i24(i24, i24)
declare i32 @llvm.smin.i32(i32, i32)
declare i64 @llvm.smin.i64(i64, i64)
declare i128 @llvm.smin.i128(i128, i128)

declare <1 x i32> @llvm.smin.v1i32(<1 x i32>, <1 x i32>)
declare <2 x i32> @llvm.smin.v2i32(<2 x i32>, <2 x i32>)
declare <3 x i32> @llvm.smin.v3i32(<3 x i32>, <3 x i32>)
declare <4 x i32> @llvm.smin.v4i32(<4 x i32>, <4 x i32>)
declare <8 x i32> @llvm.smin.v8i32(<8 x i32>, <8 x i32>)

declare <8 x i16> @llvm.smin.v8i16(<8 x i16>, <8 x i16>)
declare <16 x i8> @llvm.smin.v16i8(<16 x i8>, <16 x i8>)
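
; Scalar smin: power-of-two widths lower to a compare plus cmov; the i24 case
; is first sign-extended inside a 32-bit register, and i64/i128 on i686 compare
; the halves with a cmp/sbb chain before selecting with cmov.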
define i8 @test_i8(i8 %a, i8 %b) nounwind {
; X64-LABEL: test_i8:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpb %al, %dil
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-LABEL: test_i8:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpb %al, %cl
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
  %r = call i8 @llvm.smin.i8(i8 %a, i8 %b)
  ret i8 %r
}

define i16 @test_i16(i16 %a, i16 %b) nounwind {
; X64-LABEL: test_i16:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpw %ax, %di
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
;
; X86-LABEL: test_i16:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpw %ax, %cx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
  %r = call i16 @llvm.smin.i16(i16 %a, i16 %b)
  ret i16 %r
}

define i24 @test_i24(i24 %a, i24 %b) nounwind {
; X64-LABEL: test_i24:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: shll $8, %esi
; X64-NEXT: sarl $8, %esi
; X64-NEXT: shll $8, %eax
; X64-NEXT: sarl $8, %eax
; X64-NEXT: cmpl %esi, %eax
; X64-NEXT: cmovgel %esi, %eax
; X64-NEXT: retq
;
; X86-LABEL: test_i24:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: shll $8, %ecx
; X86-NEXT: sarl $8, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: shll $8, %eax
; X86-NEXT: sarl $8, %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovgel %ecx, %eax
; X86-NEXT: retl
  %r = call i24 @llvm.smin.i24(i24 %a, i24 %b)
  ret i24 %r
}

define i32 @test_i32(i32 %a, i32 %b) nounwind {
; X64-LABEL: test_i32:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpl %esi, %edi
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: retq
;
; X86-LABEL: test_i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: retl
  %r = call i32 @llvm.smin.i32(i32 %a, i32 %b)
  ret i32 %r
}

define i64 @test_i64(i64 %a, i64 %b) nounwind {
; X64-LABEL: test_i64:
; X64: # %bb.0:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: cmpq %rsi, %rdi
; X64-NEXT: cmovlq %rdi, %rax
; X64-NEXT: retq
;
; X86-LABEL: test_i64:
; X86: # %bb.0:
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: movl %esi, %edi
; X86-NEXT: sbbl %edx, %edi
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: cmovll %esi, %edx
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl
  %r = call i64 @llvm.smin.i64(i64 %a, i64 %b)
  ret i64 %r
}

define i128 @test_i128(i128 %a, i128 %b) nounwind {
; X64-LABEL: test_i128:
; X64: # %bb.0:
; X64-NEXT: movq %rdx, %rax
; X64-NEXT: cmpq %rdx, %rdi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: sbbq %rcx, %rdx
; X64-NEXT: cmovlq %rdi, %rax
; X64-NEXT: cmovlq %rsi, %rcx
; X64-NEXT: movq %rcx, %rdx
; X64-NEXT: retq
;
; X86-LABEL: test_i128:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: cmpl %edx, %ebx
; X86-NEXT: sbbl %esi, %ebp
; X86-NEXT: movl %eax, %ebp
; X86-NEXT: sbbl %ecx, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, %eax
; X86-NEXT: sbbl %ebp, %eax
; X86-NEXT: cmovll %ebx, %edx
; X86-NEXT: cmovll {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmovll {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmovll %edi, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %ebp, 12(%eax)
; X86-NEXT: movl %ecx, 8(%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edx, (%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call i128 @llvm.smin.i128(i128 %a, i128 %b)
  ret i128 %r
}

define <1 x i32> @test_v1i32(<1 x i32> %a, <1 x i32> %b) nounwind {
; X64-LABEL: test_v1i32:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpl %esi, %edi
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: retq
;
; X86-LABEL: test_v1i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: retl
  %r = call <1 x i32> @llvm.smin.v1i32(<1 x i32> %a, <1 x i32> %b)
  ret <1 x i32> %r
}

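; Vector smin: under plain SSE2 there is no pminsd/pminsb, so i32 and i8
; elements are selected with a pcmpgt/pand/pandn/por sequence (i16 has pminsw);
; the AVX runs use vpmins* directly, and the i686 run scalarizes every element.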
define <2 x i32> @test_v2i32(<2 x i32> %a, <2 x i32> %b) nounwind {
; SSE-LABEL: test_v2i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v2i32:
; AVX: # %bb.0:
; AVX-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v2i32:
; X86: # %bb.0:
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmpl %eax, %esi
; X86-NEXT: cmovll %esi, %eax
; X86-NEXT: cmpl %edx, %ecx
; X86-NEXT: cmovll %ecx, %edx
; X86-NEXT: popl %esi
; X86-NEXT: retl
  %r = call <2 x i32> @llvm.smin.v2i32(<2 x i32> %a, <2 x i32> %b)
  ret <2 x i32> %r
}

define <3 x i32> @test_v3i32(<3 x i32> %a, <3 x i32> %b) nounwind {
; SSE-LABEL: test_v3i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v3i32:
; AVX: # %bb.0:
; AVX-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v3i32:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: cmpl %eax, %ebx
; X86-NEXT: cmovll %ebx, %eax
; X86-NEXT: cmpl %edx, %edi
; X86-NEXT: cmovll %edi, %edx
; X86-NEXT: cmpl %ecx, %esi
; X86-NEXT: cmovll %esi, %ecx
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl
  %r = call <3 x i32> @llvm.smin.v3i32(<3 x i32> %a, <3 x i32> %b)
  ret <3 x i32> %r
}

define <4 x i32> @test_v4i32(<4 x i32> %a, <4 x i32> %b) nounwind {
; SSE-LABEL: test_v4i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v4i32:
; AVX: # %bb.0:
; AVX-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v4i32:
; X86: # %bb.0:
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edi, %eax
; X86-NEXT: cmovll %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %esi, %eax
; X86-NEXT: cmovll %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
  %r = call <4 x i32> @llvm.smin.v4i32(<4 x i32> %a, <4 x i32> %b)
  ret <4 x i32> %r
}

define <8 x i32> @test_v8i32(<8 x i32> %a, <8 x i32> %b) nounwind {
; SSE-LABEL: test_v8i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm2, %xmm4
; SSE-NEXT: pcmpgtd %xmm0, %xmm4
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: pandn %xmm2, %xmm4
; SSE-NEXT: por %xmm4, %xmm0
; SSE-NEXT: movdqa %xmm3, %xmm2
; SSE-NEXT: pcmpgtd %xmm1, %xmm2
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: pandn %xmm3, %xmm2
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: retq
;
; AVX1-LABEL: test_v8i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
; AVX1-NEXT: vpminsd %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: test_v8i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vpminsd %ymm1, %ymm0, %ymm0
; AVX2-NEXT: retq
;
; X86-LABEL: test_v8i32:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ebp, %eax
; X86-NEXT: cmovll %eax, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ebx, %eax
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edi, %eax
; X86-NEXT: cmovll %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %esi, %eax
; X86-NEXT: cmovll %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %eax, 28(%ecx)
; X86-NEXT: movl %edx, 24(%ecx)
; X86-NEXT: movl (%esp), %eax # 4-byte Reload
; X86-NEXT: movl %eax, 20(%ecx)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
; X86-NEXT: movl %eax, 16(%ecx)
; X86-NEXT: movl %esi, 12(%ecx)
; X86-NEXT: movl %edi, 8(%ecx)
; X86-NEXT: movl %ebx, 4(%ecx)
; X86-NEXT: movl %ebp, (%ecx)
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call <8 x i32> @llvm.smin.v8i32(<8 x i32> %a, <8 x i32> %b)
  ret <8 x i32> %r
}

define <8 x i16> @test_v8i16(<8 x i16> %a, <8 x i16> %b) nounwind {
; SSE-LABEL: test_v8i16:
; SSE: # %bb.0:
; SSE-NEXT: pminsw %xmm1, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v8i16:
; AVX: # %bb.0:
; AVX-NEXT: vpminsw %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v8i16:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %bp, %ax
; X86-NEXT: cmovll %eax, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %bx, %ax
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %di, %ax
; X86-NEXT: cmovll %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %si, %ax
; X86-NEXT: cmovll %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %dx, %ax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %cx, %ax
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %dx, %ax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpw %ax, %cx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movw %ax, 14(%ecx)
; X86-NEXT: movw %dx, 12(%ecx)
; X86-NEXT: movl (%esp), %eax # 4-byte Reload
; X86-NEXT: movw %ax, 10(%ecx)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
; X86-NEXT: movw %ax, 8(%ecx)
; X86-NEXT: movw %si, 6(%ecx)
; X86-NEXT: movw %di, 4(%ecx)
; X86-NEXT: movw %bx, 2(%ecx)
; X86-NEXT: movw %bp, (%ecx)
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call <8 x i16> @llvm.smin.v8i16(<8 x i16> %a, <8 x i16> %b)
  ret <8 x i16> %r
}

define <16 x i8> @test_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
; SSE-LABEL: test_v16i8:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtb %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v16i8:
; AVX: # %bb.0:
; AVX-NEXT: vpminsb %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v16i8:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $40, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %bl, %al
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl %ebx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %dl, %al
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %bl, %al
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %dl, %al
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb %cl, 15(%eax)
; X86-NEXT: movb %dl, 14(%eax)
; X86-NEXT: movb %bl, 13(%eax)
; X86-NEXT: movl %esi, %ecx
; X86-NEXT: movb %cl, 12(%eax)
; X86-NEXT: movl %edi, %ecx
; X86-NEXT: movb %cl, 11(%eax)
; X86-NEXT: movl %ebp, %ecx
; X86-NEXT: movb %cl, 10(%eax)
; X86-NEXT: movl (%esp), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 9(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 8(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 7(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 6(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 5(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 4(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 3(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 2(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 1(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, (%eax)
; X86-NEXT: addl $40, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call <16 x i8> @llvm.smin.v16i8(<16 x i8> %a, <16 x i8> %b)
  ret <16 x i8> %r
}

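; With both operands arithmetic-shifted right, the known sign bits let the min
; be computed on a narrower value, e.g. the i64 case below needs only a single
; 32-bit compare on i686 and the i128 case a single 64-bit compare on x86-64.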
define i16 @test_signbits_i16(i16 %a, i16 %b) nounwind {
; X64-LABEL: test_signbits_i16:
; X64: # %bb.0:
; X64-NEXT: movswl %si, %eax
; X64-NEXT: movswl %di, %ecx
; X64-NEXT: shrl $15, %ecx
; X64-NEXT: shrl $8, %eax
; X64-NEXT: cmpw %ax, %cx
; X64-NEXT: cmovll %ecx, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
;
; X86-LABEL: test_signbits_i16:
; X86: # %bb.0:
; X86-NEXT: movsbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movswl {{[0-9]+}}(%esp), %eax
; X86-NEXT: shrl $15, %eax
; X86-NEXT: cmpw %cx, %ax
; X86-NEXT: cmovgel %ecx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
  %ax = ashr i16 %a, 15
  %bx = ashr i16 %b, 8
  %r = call i16 @llvm.smin.i16(i16 %ax, i16 %bx)
  ret i16 %r
}

define i32 @test_signbits_i32(i32 %a, i32 %b) nounwind {
; X64-LABEL: test_signbits_i32:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: sarl $16, %edi
; X64-NEXT: sarl $17, %eax
; X64-NEXT: cmpl %eax, %edi
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: retq
;
; X86-LABEL: test_signbits_i32:
; X86: # %bb.0:
; X86-NEXT: movswl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: sarl $17, %eax
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: retl
  %ax = ashr i32 %a, 16
  %bx = ashr i32 %b, 17
  %r = call i32 @llvm.smin.i32(i32 %ax, i32 %bx)
  ret i32 %r
}

define i64 @test_signbits_i64(i64 %a, i64 %b) nounwind {
; X64-LABEL: test_signbits_i64:
; X64: # %bb.0:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: sarq $32, %rdi
; X64-NEXT: sarq $32, %rax
; X64-NEXT: cmpq %rax, %rdi
; X64-NEXT: cmovlq %rdi, %rax
; X64-NEXT: retq
;
; X86-LABEL: test_signbits_i64:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: movl %eax, %edx
; X86-NEXT: sarl $31, %edx
; X86-NEXT: retl
  %ax = ashr i64 %a, 32
  %bx = ashr i64 %b, 32
  %r = call i64 @llvm.smin.i64(i64 %ax, i64 %bx)
  ret i64 %r
}

define i128 @test_signbits_i128(i128 %a, i128 %b) nounwind {
; X64-LABEL: test_signbits_i128:
; X64: # %bb.0:
; X64-NEXT: movq %rcx, %rax
; X64-NEXT: sarq $28, %rax
; X64-NEXT: cmpq %rax, %rsi
; X64-NEXT: cmovlq %rsi, %rax
; X64-NEXT: movq %rax, %rdx
; X64-NEXT: sarq $63, %rdx
; X64-NEXT: retq
;
; X86-LABEL: test_signbits_i128:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: shrdl $28, %edi, %ecx
; X86-NEXT: sarl $28, %edi
; X86-NEXT: cmpl %ecx, %esi
; X86-NEXT: movl %edx, %ebx
; X86-NEXT: sbbl %edi, %ebx
; X86-NEXT: cmovll %esi, %ecx
; X86-NEXT: cmovll %edx, %edi
; X86-NEXT: movl %edi, 4(%eax)
; X86-NEXT: sarl $31, %edi
; X86-NEXT: movl %ecx, (%eax)
; X86-NEXT: movl %edi, 12(%eax)
; X86-NEXT: movl %edi, 8(%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl $4
  %ax = ashr i128 %a, 64
  %bx = ashr i128 %b, 92
  %r = call i128 @llvm.smin.i128(i128 %ax, i128 %bx)
  ret i128 %r
}