; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64,SSE
; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx | FileCheck %s --check-prefixes=X64,AVX,AVX1
; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx2 | FileCheck %s --check-prefixes=X64,AVX,AVX2
; RUN: llc < %s -mtriple=i686 -mattr=cmov | FileCheck %s --check-prefix=X86

declare i8 @llvm.smin.i8(i8, i8)
declare i16 @llvm.smin.i16(i16, i16)
declare i24 @llvm.smin.i24(i24, i24)
declare i32 @llvm.smin.i32(i32, i32)
declare i64 @llvm.smin.i64(i64, i64)
declare i128 @llvm.smin.i128(i128, i128)

declare <1 x i32> @llvm.smin.v1i32(<1 x i32>, <1 x i32>)
declare <2 x i32> @llvm.smin.v2i32(<2 x i32>, <2 x i32>)
declare <3 x i32> @llvm.smin.v3i32(<3 x i32>, <3 x i32>)
declare <4 x i32> @llvm.smin.v4i32(<4 x i32>, <4 x i32>)
declare <8 x i32> @llvm.smin.v8i32(<8 x i32>, <8 x i32>)

declare <8 x i16> @llvm.smin.v8i16(<8 x i16>, <8 x i16>)
declare <16 x i8> @llvm.smin.v16i8(<16 x i8>, <16 x i8>)
define i8 @test_i8(i8 %a, i8 %b) nounwind {
; X64-LABEL: test_i8:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpb %al, %dil
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-LABEL: test_i8:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpb %al, %cl
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
  %r = call i8 @llvm.smin.i8(i8 %a, i8 %b)
  ret i8 %r
}

define i16 @test_i16(i16 %a, i16 %b) nounwind {
; X64-LABEL: test_i16:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpw %ax, %di
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
;
; X86-LABEL: test_i16:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpw %ax, %cx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
  %r = call i16 @llvm.smin.i16(i16 %a, i16 %b)
  ret i16 %r
}

define i24 @test_i24(i24 %a, i24 %b) nounwind {
; X64-LABEL: test_i24:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: shll $8, %esi
; X64-NEXT: sarl $8, %esi
; X64-NEXT: shll $8, %eax
; X64-NEXT: sarl $8, %eax
; X64-NEXT: cmpl %esi, %eax
; X64-NEXT: cmovgel %esi, %eax
; X64-NEXT: retq
;
; X86-LABEL: test_i24:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: shll $8, %ecx
; X86-NEXT: sarl $8, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: shll $8, %eax
; X86-NEXT: sarl $8, %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovgel %ecx, %eax
; X86-NEXT: retl
  %r = call i24 @llvm.smin.i24(i24 %a, i24 %b)
  ret i24 %r
}

define i32 @test_i32(i32 %a, i32 %b) nounwind {
; X64-LABEL: test_i32:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpl %esi, %edi
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: retq
;
; X86-LABEL: test_i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: retl
  %r = call i32 @llvm.smin.i32(i32 %a, i32 %b)
  ret i32 %r
}

define i64 @test_i64(i64 %a, i64 %b) nounwind {
; X64-LABEL: test_i64:
; X64: # %bb.0:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: cmpq %rsi, %rdi
; X64-NEXT: cmovlq %rdi, %rax
; X64-NEXT: retq
;
; X86-LABEL: test_i64:
; X86: # %bb.0:
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: movl %eax, %edi
; X86-NEXT: cmovbl %ecx, %edi
; X86-NEXT: cmpl %edx, %esi
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: cmovel %edi, %eax
; X86-NEXT: cmovll %esi, %edx
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl
  %r = call i64 @llvm.smin.i64(i64 %a, i64 %b)
  ret i64 %r
}

define i128 @test_i128(i128 %a, i128 %b) nounwind {
; X64-LABEL: test_i128:
; X64: # %bb.0:
; X64-NEXT: movq %rdx, %rax
; X64-NEXT: cmpq %rdx, %rdi
; X64-NEXT: cmovbq %rdi, %rdx
; X64-NEXT: cmpq %rcx, %rsi
; X64-NEXT: cmovlq %rdi, %rax
; X64-NEXT: cmoveq %rdx, %rax
; X64-NEXT: cmovlq %rsi, %rcx
; X64-NEXT: movq %rcx, %rdx
; X64-NEXT: retq
;
; X86-LABEL: test_i128:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: movl %ecx, %ebx
; X86-NEXT: cmovbl %eax, %ebx
; X86-NEXT: cmpl %esi, %edi
; X86-NEXT: movl %ecx, %ebp
; X86-NEXT: cmovbl %eax, %ebp
; X86-NEXT: cmovel %ebx, %ebp
; X86-NEXT: movl %esi, %eax
; X86-NEXT: cmovbl %edi, %eax
; X86-NEXT: movl %eax, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: cmpl %edx, %edi
; X86-NEXT: movl %edx, %eax
; X86-NEXT: cmovbl %edi, %eax
; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl %ebx, %edi
; X86-NEXT: sbbl %eax, %edi
; X86-NEXT: cmovll {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmovll {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ebx, %edi
; X86-NEXT: xorl %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: xorl %edx, %ebx
; X86-NEXT: orl %edi, %ebx
; X86-NEXT: cmovel %ebp, %ecx
; X86-NEXT: cmovel (%esp), %esi # 4-byte Folded Reload
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: cmpl %eax, %edi
; X86-NEXT: cmovll {{[0-9]+}}(%esp), %edx
; X86-NEXT: cmovel {{[-0-9]+}}(%e{{[sb]}}p), %edx # 4-byte Folded Reload
; X86-NEXT: cmovll %edi, %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %eax, 12(%edi)
; X86-NEXT: movl %edx, 8(%edi)
; X86-NEXT: movl %esi, 4(%edi)
; X86-NEXT: movl %ecx, (%edi)
; X86-NEXT: movl %edi, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call i128 @llvm.smin.i128(i128 %a, i128 %b)
  ret i128 %r
}

define <1 x i32> @test_v1i32(<1 x i32> %a, <1 x i32> %b) nounwind {
; X64-LABEL: test_v1i32:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: cmpl %esi, %edi
; X64-NEXT: cmovll %edi, %eax
; X64-NEXT: retq
;
; X86-LABEL: test_v1i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl %eax, %ecx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: retl
  %r = call <1 x i32> @llvm.smin.v1i32(<1 x i32> %a, <1 x i32> %b)
  ret <1 x i32> %r
}

define <2 x i32> @test_v2i32(<2 x i32> %a, <2 x i32> %b) nounwind {
; SSE-LABEL: test_v2i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v2i32:
; AVX: # %bb.0:
; AVX-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v2i32:
; X86: # %bb.0:
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: cmpl %eax, %esi
; X86-NEXT: cmovll %esi, %eax
; X86-NEXT: cmpl %edx, %ecx
; X86-NEXT: cmovll %ecx, %edx
; X86-NEXT: popl %esi
; X86-NEXT: retl
  %r = call <2 x i32> @llvm.smin.v2i32(<2 x i32> %a, <2 x i32> %b)
  ret <2 x i32> %r
}

define <3 x i32> @test_v3i32(<3 x i32> %a, <3 x i32> %b) nounwind {
; SSE-LABEL: test_v3i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v3i32:
; AVX: # %bb.0:
; AVX-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v3i32:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: cmpl %eax, %ebx
; X86-NEXT: cmovll %ebx, %eax
; X86-NEXT: cmpl %edx, %edi
; X86-NEXT: cmovll %edi, %edx
; X86-NEXT: cmpl %ecx, %esi
; X86-NEXT: cmovll %esi, %ecx
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl
  %r = call <3 x i32> @llvm.smin.v3i32(<3 x i32> %a, <3 x i32> %b)
  ret <3 x i32> %r
}

define <4 x i32> @test_v4i32(<4 x i32> %a, <4 x i32> %b) nounwind {
; SSE-LABEL: test_v4i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtd %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v4i32:
; AVX: # %bb.0:
; AVX-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v4i32:
; X86: # %bb.0:
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edi, %eax
; X86-NEXT: cmovll %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %esi, %eax
; X86-NEXT: cmovll %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
  %r = call <4 x i32> @llvm.smin.v4i32(<4 x i32> %a, <4 x i32> %b)
  ret <4 x i32> %r
}

define <8 x i32> @test_v8i32(<8 x i32> %a, <8 x i32> %b) nounwind {
; SSE-LABEL: test_v8i32:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm2, %xmm4
; SSE-NEXT: pcmpgtd %xmm0, %xmm4
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: pandn %xmm2, %xmm4
; SSE-NEXT: por %xmm4, %xmm0
; SSE-NEXT: movdqa %xmm3, %xmm2
; SSE-NEXT: pcmpgtd %xmm1, %xmm2
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: pandn %xmm3, %xmm2
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: retq
;
; AVX1-LABEL: test_v8i32:
; AVX1: # %bb.0:
; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
; AVX1-NEXT: vpminsd %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpminsd %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX1-NEXT: retq
;
; AVX2-LABEL: test_v8i32:
; AVX2: # %bb.0:
; AVX2-NEXT: vpminsd %ymm1, %ymm0, %ymm0
; AVX2-NEXT: retq
;
; X86-LABEL: test_v8i32:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ebp, %eax
; X86-NEXT: cmovll %eax, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ebx, %eax
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edi, %eax
; X86-NEXT: cmovll %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %esi, %eax
; X86-NEXT: cmovll %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %edx, %eax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpl %ecx, %eax
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: cmpl %eax, %edx
; X86-NEXT: cmovll %edx, %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl %eax, 28(%edx)
; X86-NEXT: movl %ecx, 24(%edx)
; X86-NEXT: movl (%esp), %eax # 4-byte Reload
; X86-NEXT: movl %eax, 20(%edx)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
; X86-NEXT: movl %eax, 16(%edx)
; X86-NEXT: movl %esi, 12(%edx)
; X86-NEXT: movl %edi, 8(%edx)
; X86-NEXT: movl %ebx, 4(%edx)
; X86-NEXT: movl %ebp, (%edx)
; X86-NEXT: movl %edx, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call <8 x i32> @llvm.smin.v8i32(<8 x i32> %a, <8 x i32> %b)
  ret <8 x i32> %r
}

define <8 x i16> @test_v8i16(<8 x i16> %a, <8 x i16> %b) nounwind {
; SSE-LABEL: test_v8i16:
; SSE: # %bb.0:
; SSE-NEXT: pminsw %xmm1, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v8i16:
; AVX: # %bb.0:
; AVX-NEXT: vpminsw %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v8i16:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %bp, %ax
; X86-NEXT: cmovll %eax, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %bx, %ax
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %di, %ax
; X86-NEXT: cmovll %eax, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %si, %ax
; X86-NEXT: cmovll %eax, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %dx, %ax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %cx, %ax
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpw %dx, %ax
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpw %ax, %cx
; X86-NEXT: cmovll %ecx, %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movw %ax, 14(%ecx)
; X86-NEXT: movw %dx, 12(%ecx)
; X86-NEXT: movl (%esp), %eax # 4-byte Reload
; X86-NEXT: movw %ax, 10(%ecx)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
; X86-NEXT: movw %ax, 8(%ecx)
; X86-NEXT: movw %si, 6(%ecx)
; X86-NEXT: movw %di, 4(%ecx)
; X86-NEXT: movw %bx, 2(%ecx)
; X86-NEXT: movw %bp, (%ecx)
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call <8 x i16> @llvm.smin.v8i16(<8 x i16> %a, <8 x i16> %b)
  ret <8 x i16> %r
}

define <16 x i8> @test_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
; SSE-LABEL: test_v16i8:
; SSE: # %bb.0:
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pcmpgtb %xmm0, %xmm2
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: retq
;
; AVX-LABEL: test_v16i8:
; AVX: # %bb.0:
; AVX-NEXT: vpminsb %xmm1, %xmm0, %xmm0
; AVX-NEXT: retq
;
; X86-LABEL: test_v16i8:
; X86: # %bb.0:
; X86-NEXT: pushl %ebp
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $40, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %bl, %al
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl %ebx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %dl, %al
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, %ebp
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl %ecx, %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %bl, %al
; X86-NEXT: cmovll %eax, %ebx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %dl, %al
; X86-NEXT: cmovll %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: cmpb %cl, %al
; X86-NEXT: cmovll %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb %cl, 15(%eax)
; X86-NEXT: movb %dl, 14(%eax)
; X86-NEXT: movb %bl, 13(%eax)
; X86-NEXT: movl %esi, %ecx
; X86-NEXT: movb %cl, 12(%eax)
; X86-NEXT: movl %edi, %ecx
; X86-NEXT: movb %cl, 11(%eax)
; X86-NEXT: movl %ebp, %ecx
; X86-NEXT: movb %cl, 10(%eax)
; X86-NEXT: movl (%esp), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 9(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 8(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 7(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 6(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 5(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 4(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 3(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 2(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, 1(%eax)
; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
; X86-NEXT: movb %cl, (%eax)
; X86-NEXT: addl $40, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: popl %ebp
; X86-NEXT: retl $4
  %r = call <16 x i8> @llvm.smin.v16i8(<16 x i8> %a, <16 x i8> %b)
  ret <16 x i8> %r
}