1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64,SSE
3 ; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx | FileCheck %s --check-prefixes=X64,AVX,AVX1
4 ; RUN: llc < %s -mtriple=x86_64-linux -mattr=avx2 | FileCheck %s --check-prefixes=X64,AVX,AVX2
5 ; RUN: llc < %s -mtriple=i686 -mattr=cmov | FileCheck %s --check-prefix=X86
7 ; The i1 parameter is not codegen-relevant right now.
9 declare i8 @llvm.abs.i8(i8, i1)
10 declare i16 @llvm.abs.i16(i16, i1)
11 declare i24 @llvm.abs.i24(i24, i1)
12 declare i32 @llvm.abs.i32(i32, i1)
13 declare i64 @llvm.abs.i64(i64, i1)
14 declare i128 @llvm.abs.i128(i128, i1)
16 declare <1 x i32> @llvm.abs.v1i32(<1 x i32>, i1)
17 declare <2 x i32> @llvm.abs.v2i32(<2 x i32>, i1)
18 declare <3 x i32> @llvm.abs.v3i32(<3 x i32>, i1)
19 declare <4 x i32> @llvm.abs.v4i32(<4 x i32>, i1)
20 declare <8 x i32> @llvm.abs.v8i32(<8 x i32>, i1)
22 declare <8 x i16> @llvm.abs.v8i16(<8 x i16>, i1)
23 declare <16 x i8> @llvm.abs.v16i8(<16 x i8>, i1)
; i8 abs: no byte-sized cmov exists, so both targets use the sign-mask idiom
; (sar $7 to get the sign mask, then xor + sub).
25 define i8 @test_i8(i8 %a) nounwind {
28 ; X64-NEXT: movl %edi, %eax
29 ; X64-NEXT: movl %eax, %ecx
30 ; X64-NEXT: sarb $7, %cl
31 ; X64-NEXT: xorb %cl, %al
32 ; X64-NEXT: subb %cl, %al
33 ; X64-NEXT: # kill: def $al killed $al killed $eax
38 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
39 ; X86-NEXT: movl %eax, %ecx
40 ; X86-NEXT: sarb $7, %cl
41 ; X86-NEXT: xorb %cl, %al
42 ; X86-NEXT: subb %cl, %al
44 %r = call i8 @llvm.abs.i8(i8 %a, i1 false)
; i16 abs: negate-then-cmovsw pattern (16-bit cmov is available).
48 define i16 @test_i16(i16 %a) nounwind {
49 ; X64-LABEL: test_i16:
51 ; X64-NEXT: movl %edi, %eax
53 ; X64-NEXT: cmovsw %di, %ax
56 ; X86-LABEL: test_i16:
58 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
59 ; X86-NEXT: movl %ecx, %eax
61 ; X86-NEXT: cmovsw %cx, %ax
63 %r = call i16 @llvm.abs.i16(i16 %a, i1 false)
; i24 abs: the odd-width value is first sign-extended in a 32-bit register
; (shll $8 / sarl $8), then handled like i32 with neg + cmov.
67 define i24 @test_i24(i24 %a) nounwind {
68 ; X64-LABEL: test_i24:
70 ; X64-NEXT: shll $8, %edi
71 ; X64-NEXT: sarl $8, %edi
72 ; X64-NEXT: movl %edi, %eax
74 ; X64-NEXT: cmovsl %edi, %eax
77 ; X86-LABEL: test_i24:
79 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
80 ; X86-NEXT: shll $8, %ecx
81 ; X86-NEXT: sarl $8, %ecx
82 ; X86-NEXT: movl %ecx, %eax
84 ; X86-NEXT: cmovsl %ecx, %eax
86 %r = call i24 @llvm.abs.i24(i24 %a, i1 false)
; i32 abs: canonical negl + cmovsl on both targets.
90 define i32 @test_i32(i32 %a) nounwind {
91 ; X64-LABEL: test_i32:
93 ; X64-NEXT: movl %edi, %eax
95 ; X64-NEXT: cmovsl %edi, %eax
98 ; X86-LABEL: test_i32:
100 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
101 ; X86-NEXT: movl %ecx, %eax
102 ; X86-NEXT: negl %eax
103 ; X86-NEXT: cmovsl %ecx, %eax
105 %r = call i32 @llvm.abs.i32(i32 %a, i1 false)
; i64 abs: single negq + cmovsq on x86-64; on 32-bit the value lives in a
; register pair, so the sign-mask idiom is used with subl/sbbl for the carry.
109 define i64 @test_i64(i64 %a) nounwind {
110 ; X64-LABEL: test_i64:
112 ; X64-NEXT: movq %rdi, %rax
113 ; X64-NEXT: negq %rax
114 ; X64-NEXT: cmovsq %rdi, %rax
117 ; X86-LABEL: test_i64:
119 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
120 ; X86-NEXT: movl %edx, %ecx
121 ; X86-NEXT: sarl $31, %ecx
122 ; X86-NEXT: xorl %ecx, %edx
123 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
124 ; X86-NEXT: xorl %ecx, %eax
125 ; X86-NEXT: subl %ecx, %eax
126 ; X86-NEXT: sbbl %ecx, %edx
128 %r = call i64 @llvm.abs.i64(i64 %a, i1 false)
; i128 abs: sign-mask expansion over a register pair on x86-64 (sub/sbb);
; on 32-bit the result is returned through a pointer (four 32-bit stores)
; with a sub + three sbb carry chain.
132 define i128 @test_i128(i128 %a) nounwind {
133 ; X64-LABEL: test_i128:
135 ; X64-NEXT: movq %rsi, %rdx
136 ; X64-NEXT: movq %rdi, %rax
137 ; X64-NEXT: movq %rsi, %rcx
138 ; X64-NEXT: sarq $63, %rcx
139 ; X64-NEXT: xorq %rcx, %rdx
140 ; X64-NEXT: xorq %rcx, %rax
141 ; X64-NEXT: subq %rcx, %rax
142 ; X64-NEXT: sbbq %rcx, %rdx
145 ; X86-LABEL: test_i128:
147 ; X86-NEXT: pushl %ebx
148 ; X86-NEXT: pushl %edi
149 ; X86-NEXT: pushl %esi
150 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
151 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
152 ; X86-NEXT: movl %ecx, %edx
153 ; X86-NEXT: sarl $31, %edx
154 ; X86-NEXT: xorl %edx, %ecx
155 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
156 ; X86-NEXT: xorl %edx, %esi
157 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
158 ; X86-NEXT: xorl %edx, %edi
159 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
160 ; X86-NEXT: xorl %edx, %ebx
161 ; X86-NEXT: subl %edx, %ebx
162 ; X86-NEXT: sbbl %edx, %edi
163 ; X86-NEXT: sbbl %edx, %esi
164 ; X86-NEXT: sbbl %edx, %ecx
165 ; X86-NEXT: movl %ebx, (%eax)
166 ; X86-NEXT: movl %edi, 4(%eax)
167 ; X86-NEXT: movl %esi, 8(%eax)
168 ; X86-NEXT: movl %ecx, 12(%eax)
169 ; X86-NEXT: popl %esi
170 ; X86-NEXT: popl %edi
171 ; X86-NEXT: popl %ebx
173 %r = call i128 @llvm.abs.i128(i128 %a, i1 false)
; <1 x i32> abs: a single-element vector scalarizes to the same neg + cmov
; sequence as plain i32.
177 define <1 x i32> @test_v1i32(<1 x i32> %a) nounwind {
178 ; X64-LABEL: test_v1i32:
180 ; X64-NEXT: movl %edi, %eax
181 ; X64-NEXT: negl %eax
182 ; X64-NEXT: cmovsl %edi, %eax
185 ; X86-LABEL: test_v1i32:
187 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
188 ; X86-NEXT: movl %ecx, %eax
189 ; X86-NEXT: negl %eax
190 ; X86-NEXT: cmovsl %ecx, %eax
192 %r = call <1 x i32> @llvm.abs.v1i32(<1 x i32> %a, i1 false)
; <2 x i32> abs: SSE expands with psrad/pxor/psubd (sign-mask in a vector);
; AVX has a native vpabsd; the 32-bit target scalarizes to two neg+cmov pairs.
196 define <2 x i32> @test_v2i32(<2 x i32> %a) nounwind {
197 ; SSE-LABEL: test_v2i32:
199 ; SSE-NEXT: movdqa %xmm0, %xmm1
200 ; SSE-NEXT: psrad $31, %xmm1
201 ; SSE-NEXT: pxor %xmm1, %xmm0
202 ; SSE-NEXT: psubd %xmm1, %xmm0
205 ; AVX-LABEL: test_v2i32:
207 ; AVX-NEXT: vpabsd %xmm0, %xmm0
210 ; X86-LABEL: test_v2i32:
212 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
213 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
214 ; X86-NEXT: movl %edx, %eax
215 ; X86-NEXT: negl %eax
216 ; X86-NEXT: cmovsl %edx, %eax
217 ; X86-NEXT: movl %ecx, %edx
218 ; X86-NEXT: negl %edx
219 ; X86-NEXT: cmovsl %ecx, %edx
221 %r = call <2 x i32> @llvm.abs.v2i32(<2 x i32> %a, i1 false)
; <3 x i32> abs: vector paths are identical to v2i32/v4i32 (the odd element
; count is widened); the 32-bit target scalarizes to three neg+cmov pairs.
225 define <3 x i32> @test_v3i32(<3 x i32> %a) nounwind {
226 ; SSE-LABEL: test_v3i32:
228 ; SSE-NEXT: movdqa %xmm0, %xmm1
229 ; SSE-NEXT: psrad $31, %xmm1
230 ; SSE-NEXT: pxor %xmm1, %xmm0
231 ; SSE-NEXT: psubd %xmm1, %xmm0
234 ; AVX-LABEL: test_v3i32:
236 ; AVX-NEXT: vpabsd %xmm0, %xmm0
239 ; X86-LABEL: test_v3i32:
241 ; X86-NEXT: pushl %esi
242 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
243 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
244 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
245 ; X86-NEXT: movl %edx, %eax
246 ; X86-NEXT: negl %eax
247 ; X86-NEXT: cmovsl %edx, %eax
248 ; X86-NEXT: movl %ecx, %edx
249 ; X86-NEXT: negl %edx
250 ; X86-NEXT: cmovsl %ecx, %edx
251 ; X86-NEXT: movl %esi, %ecx
252 ; X86-NEXT: negl %ecx
253 ; X86-NEXT: cmovsl %esi, %ecx
254 ; X86-NEXT: popl %esi
256 %r = call <3 x i32> @llvm.abs.v3i32(<3 x i32> %a, i1 false)
; <4 x i32> abs: SSE sign-mask expansion / AVX vpabsd; the 32-bit target
; scalarizes all four lanes and returns the result through a pointer.
260 define <4 x i32> @test_v4i32(<4 x i32> %a) nounwind {
261 ; SSE-LABEL: test_v4i32:
263 ; SSE-NEXT: movdqa %xmm0, %xmm1
264 ; SSE-NEXT: psrad $31, %xmm1
265 ; SSE-NEXT: pxor %xmm1, %xmm0
266 ; SSE-NEXT: psubd %xmm1, %xmm0
269 ; AVX-LABEL: test_v4i32:
271 ; AVX-NEXT: vpabsd %xmm0, %xmm0
274 ; X86-LABEL: test_v4i32:
276 ; X86-NEXT: pushl %ebx
277 ; X86-NEXT: pushl %edi
278 ; X86-NEXT: pushl %esi
279 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
280 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
281 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
282 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
283 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
284 ; X86-NEXT: movl %ebx, %edx
285 ; X86-NEXT: negl %edx
286 ; X86-NEXT: cmovsl %ebx, %edx
287 ; X86-NEXT: movl %edi, %ebx
288 ; X86-NEXT: negl %ebx
289 ; X86-NEXT: cmovsl %edi, %ebx
290 ; X86-NEXT: movl %esi, %edi
291 ; X86-NEXT: negl %edi
292 ; X86-NEXT: cmovsl %esi, %edi
293 ; X86-NEXT: movl %ecx, %esi
294 ; X86-NEXT: negl %esi
295 ; X86-NEXT: cmovsl %ecx, %esi
296 ; X86-NEXT: movl %esi, 12(%eax)
297 ; X86-NEXT: movl %edi, 8(%eax)
298 ; X86-NEXT: movl %ebx, 4(%eax)
299 ; X86-NEXT: movl %edx, (%eax)
300 ; X86-NEXT: popl %esi
301 ; X86-NEXT: popl %edi
302 ; X86-NEXT: popl %ebx
304 %r = call <4 x i32> @llvm.abs.v4i32(<4 x i32> %a, i1 false)
; <8 x i32> abs: SSE processes the two 128-bit halves separately; AVX1 splits
; the 256-bit vector (vextractf128/vinsertf128) around two vpabsd ops; AVX2
; does one 256-bit vpabsd. The 32-bit target scalarizes all eight lanes and
; runs out of registers, so two intermediates are spilled to the stack.
308 define <8 x i32> @test_v8i32(<8 x i32> %a) nounwind {
309 ; SSE-LABEL: test_v8i32:
311 ; SSE-NEXT: movdqa %xmm0, %xmm2
312 ; SSE-NEXT: psrad $31, %xmm2
313 ; SSE-NEXT: pxor %xmm2, %xmm0
314 ; SSE-NEXT: psubd %xmm2, %xmm0
315 ; SSE-NEXT: movdqa %xmm1, %xmm2
316 ; SSE-NEXT: psrad $31, %xmm2
317 ; SSE-NEXT: pxor %xmm2, %xmm1
318 ; SSE-NEXT: psubd %xmm2, %xmm1
321 ; AVX1-LABEL: test_v8i32:
323 ; AVX1-NEXT: vpabsd %xmm0, %xmm1
324 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm0
325 ; AVX1-NEXT: vpabsd %xmm0, %xmm0
326 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
329 ; AVX2-LABEL: test_v8i32:
331 ; AVX2-NEXT: vpabsd %ymm0, %ymm0
334 ; X86-LABEL: test_v8i32:
336 ; X86-NEXT: pushl %ebp
337 ; X86-NEXT: pushl %ebx
338 ; X86-NEXT: pushl %edi
339 ; X86-NEXT: pushl %esi
340 ; X86-NEXT: subl $8, %esp
341 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
342 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
343 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp
344 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
345 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
346 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
347 ; X86-NEXT: movl %edx, %ecx
348 ; X86-NEXT: negl %ecx
349 ; X86-NEXT: cmovsl %edx, %ecx
350 ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
351 ; X86-NEXT: movl %esi, %ecx
352 ; X86-NEXT: negl %ecx
353 ; X86-NEXT: cmovsl %esi, %ecx
354 ; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
355 ; X86-NEXT: movl %edi, %esi
356 ; X86-NEXT: negl %esi
357 ; X86-NEXT: cmovsl %edi, %esi
358 ; X86-NEXT: movl %ebp, %edi
359 ; X86-NEXT: negl %edi
360 ; X86-NEXT: cmovsl %ebp, %edi
361 ; X86-NEXT: movl %ebx, %ebp
362 ; X86-NEXT: negl %ebp
363 ; X86-NEXT: cmovsl %ebx, %ebp
364 ; X86-NEXT: movl %eax, %ebx
365 ; X86-NEXT: negl %ebx
366 ; X86-NEXT: cmovsl %eax, %ebx
367 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
368 ; X86-NEXT: movl %ecx, %eax
369 ; X86-NEXT: negl %eax
370 ; X86-NEXT: cmovsl %ecx, %eax
371 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
372 ; X86-NEXT: movl %edx, %ecx
373 ; X86-NEXT: negl %ecx
374 ; X86-NEXT: cmovsl %edx, %ecx
375 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
376 ; X86-NEXT: movl %ecx, 28(%edx)
377 ; X86-NEXT: movl %eax, 24(%edx)
378 ; X86-NEXT: movl %ebx, 20(%edx)
379 ; X86-NEXT: movl %ebp, 16(%edx)
380 ; X86-NEXT: movl %edi, 12(%edx)
381 ; X86-NEXT: movl %esi, 8(%edx)
382 ; X86-NEXT: movl (%esp), %eax # 4-byte Reload
383 ; X86-NEXT: movl %eax, 4(%edx)
384 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
385 ; X86-NEXT: movl %eax, (%edx)
386 ; X86-NEXT: movl %edx, %eax
387 ; X86-NEXT: addl $8, %esp
388 ; X86-NEXT: popl %esi
389 ; X86-NEXT: popl %edi
390 ; X86-NEXT: popl %ebx
391 ; X86-NEXT: popl %ebp
393 %r = call <8 x i32> @llvm.abs.v8i32(<8 x i32> %a, i1 false)
; <8 x i16> abs: SSE computes max(x, 0-x) via psubw + pmaxsw; AVX uses
; vpabsw. The 32-bit target scalarizes all eight lanes with 16-bit cmovs
; and spills two intermediates.
397 define <8 x i16> @test_v8i16(<8 x i16> %a) nounwind {
398 ; SSE-LABEL: test_v8i16:
400 ; SSE-NEXT: pxor %xmm1, %xmm1
401 ; SSE-NEXT: psubw %xmm0, %xmm1
402 ; SSE-NEXT: pmaxsw %xmm1, %xmm0
405 ; AVX-LABEL: test_v8i16:
407 ; AVX-NEXT: vpabsw %xmm0, %xmm0
410 ; X86-LABEL: test_v8i16:
412 ; X86-NEXT: pushl %ebp
413 ; X86-NEXT: pushl %ebx
414 ; X86-NEXT: pushl %edi
415 ; X86-NEXT: pushl %esi
416 ; X86-NEXT: pushl %eax
417 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
418 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ebx
419 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ebp
420 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %edi
421 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %esi
422 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %edx
423 ; X86-NEXT: movl %edx, %ecx
425 ; X86-NEXT: cmovsw %dx, %cx
426 ; X86-NEXT: movw %cx, {{[-0-9]+}}(%e{{[sb]}}p) # 2-byte Spill
427 ; X86-NEXT: movl %esi, %ecx
429 ; X86-NEXT: cmovsw %si, %cx
430 ; X86-NEXT: movw %cx, (%esp) # 2-byte Spill
431 ; X86-NEXT: movl %edi, %esi
433 ; X86-NEXT: cmovsw %di, %si
434 ; X86-NEXT: movl %ebp, %edi
436 ; X86-NEXT: cmovsw %bp, %di
437 ; X86-NEXT: movl %ebx, %ebp
439 ; X86-NEXT: cmovsw %bx, %bp
440 ; X86-NEXT: movl %eax, %ebx
442 ; X86-NEXT: cmovsw %ax, %bx
443 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
444 ; X86-NEXT: movl %ecx, %eax
446 ; X86-NEXT: cmovsw %cx, %ax
447 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %edx
448 ; X86-NEXT: movl %edx, %ecx
450 ; X86-NEXT: cmovsw %dx, %cx
451 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
452 ; X86-NEXT: movw %cx, 14(%edx)
453 ; X86-NEXT: movw %ax, 12(%edx)
454 ; X86-NEXT: movw %bx, 10(%edx)
455 ; X86-NEXT: movw %bp, 8(%edx)
456 ; X86-NEXT: movw %di, 6(%edx)
457 ; X86-NEXT: movw %si, 4(%edx)
458 ; X86-NEXT: movzwl (%esp), %eax # 2-byte Folded Reload
459 ; X86-NEXT: movw %ax, 2(%edx)
460 ; X86-NEXT: movzwl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 2-byte Folded Reload
461 ; X86-NEXT: movw %ax, (%edx)
462 ; X86-NEXT: movl %edx, %eax
463 ; X86-NEXT: addl $4, %esp
464 ; X86-NEXT: popl %esi
465 ; X86-NEXT: popl %edi
466 ; X86-NEXT: popl %ebx
467 ; X86-NEXT: popl %ebp
469 %r = call <8 x i16> @llvm.abs.v8i16(<8 x i16> %a, i1 false)
; <16 x i8> abs: SSE uses psubb + pminub (unsigned min of x and 0-x yields
; the absolute value for i8); AVX uses vpabsb. The 32-bit target fully
; scalarizes: each byte lane gets the sar/xor/sub sign-mask sequence, with
; many 1-byte stack spills/reloads, using high-byte registers (ah/bh/ch/dh)
; to stretch the four byte-addressable GPRs.
473 define <16 x i8> @test_v16i8(<16 x i8> %a) nounwind {
474 ; SSE-LABEL: test_v16i8:
476 ; SSE-NEXT: pxor %xmm1, %xmm1
477 ; SSE-NEXT: psubb %xmm0, %xmm1
478 ; SSE-NEXT: pminub %xmm1, %xmm0
481 ; AVX-LABEL: test_v16i8:
483 ; AVX-NEXT: vpabsb %xmm0, %xmm0
486 ; X86-LABEL: test_v16i8:
488 ; X86-NEXT: pushl %ebx
489 ; X86-NEXT: subl $12, %esp
490 ; X86-NEXT: movb {{[0-9]+}}(%esp), %bh
491 ; X86-NEXT: movb {{[0-9]+}}(%esp), %bl
492 ; X86-NEXT: movb {{[0-9]+}}(%esp), %dh
493 ; X86-NEXT: movb {{[0-9]+}}(%esp), %ch
494 ; X86-NEXT: movb {{[0-9]+}}(%esp), %ah
495 ; X86-NEXT: movb {{[0-9]+}}(%esp), %dl
496 ; X86-NEXT: movb {{[0-9]+}}(%esp), %cl
497 ; X86-NEXT: movb %cl, %al
498 ; X86-NEXT: sarb $7, %al
499 ; X86-NEXT: xorb %al, %cl
500 ; X86-NEXT: subb %al, %cl
501 ; X86-NEXT: movb %cl, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
502 ; X86-NEXT: movb %dl, %al
503 ; X86-NEXT: sarb $7, %al
504 ; X86-NEXT: xorb %al, %dl
505 ; X86-NEXT: subb %al, %dl
506 ; X86-NEXT: movb %dl, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
507 ; X86-NEXT: movb %ah, %al
508 ; X86-NEXT: sarb $7, %al
509 ; X86-NEXT: xorb %al, %ah
510 ; X86-NEXT: subb %al, %ah
511 ; X86-NEXT: movb %ah, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
512 ; X86-NEXT: movb %ch, %al
513 ; X86-NEXT: sarb $7, %al
514 ; X86-NEXT: xorb %al, %ch
515 ; X86-NEXT: subb %al, %ch
516 ; X86-NEXT: movb %ch, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
517 ; X86-NEXT: movb %dh, %al
518 ; X86-NEXT: sarb $7, %al
519 ; X86-NEXT: xorb %al, %dh
520 ; X86-NEXT: subb %al, %dh
521 ; X86-NEXT: movb %dh, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
522 ; X86-NEXT: movl %ebx, %eax
523 ; X86-NEXT: sarb $7, %al
524 ; X86-NEXT: xorb %al, %bl
525 ; X86-NEXT: subb %al, %bl
526 ; X86-NEXT: movb %bl, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
527 ; X86-NEXT: movb %bh, %al
528 ; X86-NEXT: sarb $7, %al
529 ; X86-NEXT: xorb %al, %bh
530 ; X86-NEXT: subb %al, %bh
531 ; X86-NEXT: movb %bh, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
532 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
533 ; X86-NEXT: movl %ecx, %eax
534 ; X86-NEXT: sarb $7, %al
535 ; X86-NEXT: xorb %al, %cl
536 ; X86-NEXT: subb %al, %cl
537 ; X86-NEXT: movb %cl, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
538 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
539 ; X86-NEXT: movl %ecx, %eax
540 ; X86-NEXT: sarb $7, %al
541 ; X86-NEXT: xorb %al, %cl
542 ; X86-NEXT: subb %al, %cl
543 ; X86-NEXT: movb %cl, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
544 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
545 ; X86-NEXT: movl %ecx, %eax
546 ; X86-NEXT: sarb $7, %al
547 ; X86-NEXT: xorb %al, %cl
548 ; X86-NEXT: subb %al, %cl
549 ; X86-NEXT: movb %cl, {{[-0-9]+}}(%e{{[sb]}}p) # 1-byte Spill
550 ; X86-NEXT: movb {{[0-9]+}}(%esp), %bh
551 ; X86-NEXT: movb %bh, %al
552 ; X86-NEXT: sarb $7, %al
553 ; X86-NEXT: xorb %al, %bh
554 ; X86-NEXT: subb %al, %bh
555 ; X86-NEXT: movb {{[0-9]+}}(%esp), %bl
556 ; X86-NEXT: movl %ebx, %eax
557 ; X86-NEXT: sarb $7, %al
558 ; X86-NEXT: xorb %al, %bl
559 ; X86-NEXT: subb %al, %bl
560 ; X86-NEXT: movb {{[0-9]+}}(%esp), %dh
561 ; X86-NEXT: movb %dh, %al
562 ; X86-NEXT: sarb $7, %al
563 ; X86-NEXT: xorb %al, %dh
564 ; X86-NEXT: subb %al, %dh
565 ; X86-NEXT: movb {{[0-9]+}}(%esp), %ch
566 ; X86-NEXT: movb %ch, %al
567 ; X86-NEXT: sarb $7, %al
568 ; X86-NEXT: xorb %al, %ch
569 ; X86-NEXT: subb %al, %ch
570 ; X86-NEXT: movb {{[0-9]+}}(%esp), %dl
571 ; X86-NEXT: movl %edx, %eax
572 ; X86-NEXT: sarb $7, %al
573 ; X86-NEXT: xorb %al, %dl
574 ; X86-NEXT: subb %al, %dl
575 ; X86-NEXT: movb {{[0-9]+}}(%esp), %cl
576 ; X86-NEXT: movl %ecx, %eax
577 ; X86-NEXT: sarb $7, %al
578 ; X86-NEXT: xorb %al, %cl
579 ; X86-NEXT: subb %al, %cl
580 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
581 ; X86-NEXT: movb %cl, 15(%eax)
582 ; X86-NEXT: movb %dl, 14(%eax)
583 ; X86-NEXT: movb %ch, 13(%eax)
584 ; X86-NEXT: movb %dh, 12(%eax)
585 ; X86-NEXT: movb %bl, 11(%eax)
586 ; X86-NEXT: movb %bh, 10(%eax)
587 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
588 ; X86-NEXT: movb %cl, 9(%eax)
589 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
590 ; X86-NEXT: movb %cl, 8(%eax)
591 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
592 ; X86-NEXT: movb %cl, 7(%eax)
593 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
594 ; X86-NEXT: movb %cl, 6(%eax)
595 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
596 ; X86-NEXT: movb %cl, 5(%eax)
597 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
598 ; X86-NEXT: movb %cl, 4(%eax)
599 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
600 ; X86-NEXT: movb %cl, 3(%eax)
601 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
602 ; X86-NEXT: movb %cl, 2(%eax)
603 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
604 ; X86-NEXT: movb %cl, 1(%eax)
605 ; X86-NEXT: movzbl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 1-byte Folded Reload
606 ; X86-NEXT: movb %cl, (%eax)
607 ; X86-NEXT: addl $12, %esp
608 ; X86-NEXT: popl %ebx
610 %r = call <16 x i8> @llvm.abs.v16i8(<16 x i8> %a, i1 false)
; abs of a value already sign-extended in a register (i8 -> i16, shl/ashr
; pair folded into movsbl), with the i1 poison flag set to true.
614 define i16 @test_sextinreg_i16(i16 %a) nounwind {
615 ; X64-LABEL: test_sextinreg_i16:
617 ; X64-NEXT: movsbl %dil, %ecx
618 ; X64-NEXT: movl %ecx, %eax
620 ; X64-NEXT: cmovsw %cx, %ax
623 ; X86-LABEL: test_sextinreg_i16:
625 ; X86-NEXT: movsbl {{[0-9]+}}(%esp), %ecx
626 ; X86-NEXT: movl %ecx, %eax
628 ; X86-NEXT: cmovsw %cx, %ax
631 %ashr = ashr exact i16 %shl, 8
632 %res = call i16 @llvm.abs.i16(i16 %ashr, i1 true)
; abs of a sign_extend_inreg i16 -> i32 value (shl/ashr folded to movswl),
; then the usual neg + cmov.
636 define i32 @test_sextinreg_i32(i32 %a) nounwind {
637 ; X64-LABEL: test_sextinreg_i32:
639 ; X64-NEXT: movswl %di, %ecx
640 ; X64-NEXT: movl %ecx, %eax
641 ; X64-NEXT: negl %eax
642 ; X64-NEXT: cmovsl %ecx, %eax
645 ; X86-LABEL: test_sextinreg_i32:
647 ; X86-NEXT: movswl {{[0-9]+}}(%esp), %ecx
648 ; X86-NEXT: movl %ecx, %eax
649 ; X86-NEXT: negl %eax
650 ; X86-NEXT: cmovsl %ecx, %eax
652 %shl = shl i32 %a, 16
653 %ashr = ashr exact i32 %shl, 16
654 %res = call i32 @llvm.abs.i32(i32 %ashr, i1 true)
; abs of a sign_extend_inreg i32 -> i64 value: the abs can be computed on
; the low 32 bits only; the high result half is known zero (xorl %edx,%edx
; on the 32-bit target).
658 define i64 @test_sextinreg_i64(i64 %a) nounwind {
659 ; X64-LABEL: test_sextinreg_i64:
661 ; X64-NEXT: movl %edi, %eax
662 ; X64-NEXT: negl %eax
663 ; X64-NEXT: cmovsl %edi, %eax
666 ; X86-LABEL: test_sextinreg_i64:
668 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
669 ; X86-NEXT: movl %ecx, %eax
670 ; X86-NEXT: negl %eax
671 ; X86-NEXT: cmovsl %ecx, %eax
672 ; X86-NEXT: xorl %edx, %edx
674 %shl = shl i64 %a, 32
675 %ashr = ashr exact i64 %shl, 32
676 %res = call i64 @llvm.abs.i64(i64 %ashr, i1 true)
; abs of a sign_extend_inreg i64 -> i128 value: only the low half needs the
; abs computation; the high half of the result is known zero (zeroed rdx /
; two $0 stores on the 32-bit target).
680 define i128 @test_sextinreg_i128(i128 %a) nounwind {
681 ; X64-LABEL: test_sextinreg_i128:
683 ; X64-NEXT: movq %rdi, %rax
684 ; X64-NEXT: negq %rax
685 ; X64-NEXT: cmovsq %rdi, %rax
686 ; X64-NEXT: xorl %edx, %edx
689 ; X86-LABEL: test_sextinreg_i128:
691 ; X86-NEXT: pushl %esi
692 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
693 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
694 ; X86-NEXT: movl %ecx, %edx
695 ; X86-NEXT: sarl $31, %edx
696 ; X86-NEXT: xorl %edx, %ecx
697 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
698 ; X86-NEXT: xorl %edx, %esi
699 ; X86-NEXT: subl %edx, %esi
700 ; X86-NEXT: sbbl %edx, %ecx
701 ; X86-NEXT: movl %esi, (%eax)
702 ; X86-NEXT: movl %ecx, 4(%eax)
703 ; X86-NEXT: movl $0, 12(%eax)
704 ; X86-NEXT: movl $0, 8(%eax)
705 ; X86-NEXT: popl %esi
707 %shl = shl i128 %a, 64
708 %ashr = ashr exact i128 %shl, 64
709 %res = call i128 @llvm.abs.i128(i128 %ashr, i1 true)
; abs guarded by an INT8_MIN check: select returns %a1 when %a0 == -128.
; x86-64 folds the select into cmovel after the compare; the 32-bit target
; branches around the abs expansion.
713 define i8 @test_minsigned_i8(i8 %a0, i8 %a1) nounwind {
714 ; X64-LABEL: test_minsigned_i8:
716 ; X64-NEXT: movl %edi, %eax
717 ; X64-NEXT: sarb $7, %al
718 ; X64-NEXT: movl %edi, %ecx
719 ; X64-NEXT: xorb %al, %cl
720 ; X64-NEXT: subb %al, %cl
721 ; X64-NEXT: cmpb $-128, %dil
722 ; X64-NEXT: movzbl %cl, %eax
723 ; X64-NEXT: cmovel %esi, %eax
724 ; X64-NEXT: # kill: def $al killed $al killed $eax
727 ; X86-LABEL: test_minsigned_i8:
729 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
730 ; X86-NEXT: cmpb $-128, %al
731 ; X86-NEXT: jne .LBB17_1
733 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
734 ; X86-NEXT: # kill: def $al killed $al killed $eax
736 ; X86-NEXT: .LBB17_1:
737 ; X86-NEXT: movl %eax, %ecx
738 ; X86-NEXT: sarb $7, %cl
739 ; X86-NEXT: xorb %cl, %al
740 ; X86-NEXT: subb %cl, %al
741 ; X86-NEXT: movzbl %al, %eax
742 ; X86-NEXT: # kill: def $al killed $al killed $eax
744 %lim = icmp eq i8 %a0, -128
745 %abs = tail call i8 @llvm.abs.i8(i8 %a0, i1 false)
746 %res = select i1 %lim, i8 %a1, i8 %abs
; abs guarded by an INT16_MIN check: the compare is done as a 32-bit
; unsigned compare against 0x8000 on the zero-extended value; x86-64 uses
; cmovel, the 32-bit target branches.
750 define i16 @test_minsigned_i16(i16 %a0, i16 %a1) nounwind {
751 ; X64-LABEL: test_minsigned_i16:
753 ; X64-NEXT: movzwl %di, %ecx
754 ; X64-NEXT: movl %ecx, %eax
756 ; X64-NEXT: cmovsw %cx, %ax
757 ; X64-NEXT: cmpl $32768, %ecx # imm = 0x8000
758 ; X64-NEXT: cmovel %esi, %eax
759 ; X64-NEXT: # kill: def $ax killed $ax killed $eax
762 ; X86-LABEL: test_minsigned_i16:
764 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
765 ; X86-NEXT: movl %ecx, %eax
767 ; X86-NEXT: cmovsw %cx, %ax
768 ; X86-NEXT: cmpl $32768, %ecx # imm = 0x8000
769 ; X86-NEXT: jne .LBB18_2
771 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
772 ; X86-NEXT: .LBB18_2:
774 %lim = icmp eq i16 %a0, -32768
775 %abs = tail call i16 @llvm.abs.i16(i16 %a0, i1 false)
776 %res = select i1 %lim, i16 %a1, i16 %abs
; abs guarded by an INT32_MIN check: the explicit compare disappears —
; negl overflows exactly when the input is INT_MIN, so cmovol selects the
; fallback value directly from the overflow flag.
780 define i32 @test_minsigned_i32(i32 %a0, i32 %a1) nounwind {
781 ; X64-LABEL: test_minsigned_i32:
783 ; X64-NEXT: movl %edi, %eax
784 ; X64-NEXT: negl %eax
785 ; X64-NEXT: cmovsl %edi, %eax
786 ; X64-NEXT: cmovol %esi, %eax
789 ; X86-LABEL: test_minsigned_i32:
791 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
792 ; X86-NEXT: movl %ecx, %eax
793 ; X86-NEXT: negl %eax
794 ; X86-NEXT: cmovsl %ecx, %eax
795 ; X86-NEXT: cmovol {{[0-9]+}}(%esp), %eax
797 %lim = icmp eq i32 %a0, -2147483648
798 %abs = tail call i32 @llvm.abs.i32(i32 %a0, i1 false)
799 %res = select i1 %lim, i32 %a1, i32 %abs
803 define i64 @test_minsigned_i64(i64 %a0, i64 %a1) nounwind {
804 ; X64-LABEL: test_minsigned_i64:
806 ; X64-NEXT: movq %rdi, %rax
807 ; X64-NEXT: negq %rax
808 ; X64-NEXT: cmovsq %rdi, %rax
809 ; X64-NEXT: cmovoq %rsi, %rax
812 ; X86-LABEL: test_minsigned_i64:
814 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
815 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
816 ; X86-NEXT: leal -2147483648(%edx), %ecx
817 ; X86-NEXT: orl %eax, %ecx
818 ; X86-NEXT: jne .LBB20_1
819 ; X86-NEXT: # %bb.2: # %select.end
820 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
821 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
823 ; X86-NEXT: .LBB20_1: # %select.false.sink
824 ; X86-NEXT: movl %edx, %ecx
825 ; X86-NEXT: sarl $31, %ecx
826 ; X86-NEXT: xorl %ecx, %edx
827 ; X86-NEXT: xorl %ecx, %eax
828 ; X86-NEXT: subl %ecx, %eax
829 ; X86-NEXT: sbbl %ecx, %edx
831 %lim = icmp eq i64 %a0, -9223372036854775808
832 %abs = tail call i64 @llvm.abs.i64(i64 %a0, i1 false)
833 %res = select i1 %lim, i64 %a1, i64 %abs