; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefixes=X64,NOBMI
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefixes=X64,BMI

; InstCombine and DAGCombiner transform an 'add' into an 'or'
; if there are no common bits from the incoming operands.
; LEA instruction selection should be able to see through that
; transform and reduce add/shift/or instruction counts.
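;
; Illustrative sketch only (not a checked test; this restates the first case
; below for exposition): 'shl i32 %x, 1' always has bit 0 clear, and
; 'and i32 %y, 1' can only set bit 0, so the operands of the 'or' share no
; bits and the 'or' computes the same value as an 'add'. Selection can then
; use LEA's base + index*scale addressing form:
;
;   %shl = shl i32 %x, 1          ; bit 0 is always 0
;   %and = and i32 %y, 1          ; only bit 0 may be set
;   %or  = or i32 %and, %shl      ; disjoint bits => same result as 'add'
;
;   leal (%rsi,%rdi,2), %eax      ; eax = (y & 1) + (x << 1), x in edi, y in esi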
define i32 @or_shift1_and1(i32 %x, i32 %y) {
; X86-LABEL: or_shift1_and1:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: andl $1, %ecx
; X86-NEXT: leal (%ecx,%eax,2), %eax
; X64-LABEL: or_shift1_and1:
; X64-NEXT: # kill: def $esi killed $esi def $rsi
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: andl $1, %esi
; X64-NEXT: leal (%rsi,%rdi,2), %eax
%or = or i32 %and, %shl

define i32 @or_shift1_and1_swapped(i32 %x, i32 %y) {
; X86-LABEL: or_shift1_and1_swapped:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: andl $1, %ecx
; X86-NEXT: leal (%ecx,%eax,2), %eax
; X64-LABEL: or_shift1_and1_swapped:
; X64-NEXT: # kill: def $esi killed $esi def $rsi
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: andl $1, %esi
; X64-NEXT: leal (%rsi,%rdi,2), %eax
%or = or i32 %shl, %and

define i32 @or_shift2_and1(i32 %x, i32 %y) {
; X86-LABEL: or_shift2_and1:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: andl $1, %ecx
; X86-NEXT: leal (%ecx,%eax,4), %eax
; X64-LABEL: or_shift2_and1:
; X64-NEXT: # kill: def $esi killed $esi def $rsi
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: andl $1, %esi
; X64-NEXT: leal (%rsi,%rdi,4), %eax
%or = or i32 %shl, %and

define i32 @or_shift3_and1(i32 %x, i32 %y) {
; X86-LABEL: or_shift3_and1:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: andl $1, %ecx
; X86-NEXT: leal (%ecx,%eax,8), %eax
; X64-LABEL: or_shift3_and1:
; X64-NEXT: # kill: def $esi killed $esi def $rsi
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: andl $1, %esi
; X64-NEXT: leal (%rsi,%rdi,8), %eax
%or = or i32 %shl, %and

define i32 @or_shift3_and7(i32 %x, i32 %y) {
; X86-LABEL: or_shift3_and7:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: andl $7, %ecx
; X86-NEXT: leal (%ecx,%eax,8), %eax
; X64-LABEL: or_shift3_and7:
; X64-NEXT: # kill: def $esi killed $esi def $rsi
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: andl $7, %esi
; X64-NEXT: leal (%rsi,%rdi,8), %eax
%or = or i32 %shl, %and

; The shift is too big for an LEA.
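; An LEA address can only scale its index register by 1, 2, 4, or 8, so a
; shift of 4 would need a scale of 16 and cannot be folded; the code below
; keeps shl/and/or (plus a scale-1 LEA to form the sum on x86-64).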
define i32 @or_shift4_and1(i32 %x, i32 %y) {
; X86-LABEL: or_shift4_and1:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: shll $4, %ecx
; X86-NEXT: andl $1, %eax
; X86-NEXT: orl %ecx, %eax
; X64-LABEL: or_shift4_and1:
; X64-NEXT: # kill: def $esi killed $esi def $rsi
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: shll $4, %edi
; X64-NEXT: andl $1, %esi
; X64-NEXT: leal (%rsi,%rdi), %eax
%or = or i32 %shl, %and

; The mask is too big for the shift, so the 'or' isn't equivalent to an 'add'.
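; For example, with %x = 1 and %y = 8: %x << 3 = 8 and %y & 8 = 8, so both
; operands have bit 3 set; the 'or' gives 8 while an 'add' would give 16, so
; the LEA fold must not fire here.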
define i32 @or_shift3_and8(i32 %x, i32 %y) {
; X86-LABEL: or_shift3_and8:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: shll $3, %ecx
; X86-NEXT: andl $8, %eax
; X86-NEXT: orl %ecx, %eax
; X64-LABEL: or_shift3_and8:
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: leal (,%rdi,8), %eax
; X64-NEXT: andl $8, %esi
; X64-NEXT: orl %esi, %eax
%or = or i32 %shl, %and

; 64-bit operands should work too.

define i64 @or_shift1_and1_64(i64 %x, i64 %y) {
; X86-LABEL: or_shift1_and1_64:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: shldl $1, %ecx, %edx
; X86-NEXT: andl $1, %eax
; X86-NEXT: leal (%eax,%ecx,2), %eax
; X64-LABEL: or_shift1_and1_64:
; X64-NEXT: andl $1, %esi
; X64-NEXT: leaq (%rsi,%rdi,2), %rax
%or = or i64 %and, %shl

; In the following patterns, lhs and rhs of the or instruction have no common bits.
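;
; Illustrative sketch only (not a checked test): one side is masked by %y and
; the other by %xor = xor i32 %y, -1 (i.e. ~%y), so the two 'and' results are
; disjoint and the 'or' is equivalent to an 'add'. That lets the trailing '+ 1'
; fold into an LEA displacement, e.g. on BMI targets:
;
;   andl  %esi, %edx              ; edx = z & y
;   andnl %edi, %esi, %eax        ; eax = x & ~y
;   leal  1(%rdx,%rax), %eax      ; eax = (z & y) + (x & ~y) + 1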
define i32 @or_and_and_rhs_neg_i32(i32 %x, i32 %y, i32 %z) {
; X86-LABEL: or_and_and_rhs_neg_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: incl %eax
; NOBMI-LABEL: or_and_and_rhs_neg_i32:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: # kill: def $edx killed $edx def $rdx
; NOBMI-NEXT: xorl %edi, %edx
; NOBMI-NEXT: andl %esi, %edx
; NOBMI-NEXT: xorl %edi, %edx
; NOBMI-NEXT: leal 1(%rdx), %eax
; BMI-LABEL: or_and_and_rhs_neg_i32:
; BMI: # %bb.0: # %entry
; BMI-NEXT: # kill: def $edx killed $edx def $rdx
; BMI-NEXT: andl %esi, %edx
; BMI-NEXT: andnl %edi, %esi, %eax
; BMI-NEXT: leal 1(%rdx,%rax), %eax
%and1 = and i32 %z, %y
%xor = xor i32 %y, -1
%and2 = and i32 %x, %xor
%or = or i32 %and1, %and2
%inc = add i32 %or, 1

define i32 @or_and_and_lhs_neg_i32(i32 %x, i32 %y, i32 %z) {
; X86-LABEL: or_and_and_lhs_neg_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: incl %eax
; NOBMI-LABEL: or_and_and_lhs_neg_i32:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: # kill: def $edx killed $edx def $rdx
; NOBMI-NEXT: xorl %edi, %edx
; NOBMI-NEXT: andl %esi, %edx
; NOBMI-NEXT: xorl %edi, %edx
; NOBMI-NEXT: leal 1(%rdx), %eax
; BMI-LABEL: or_and_and_lhs_neg_i32:
; BMI: # %bb.0: # %entry
; BMI-NEXT: # kill: def $edx killed $edx def $rdx
; BMI-NEXT: andl %esi, %edx
; BMI-NEXT: andnl %edi, %esi, %eax
; BMI-NEXT: leal 1(%rdx,%rax), %eax
%and1 = and i32 %z, %y
%xor = xor i32 %y, -1
%and2 = and i32 %xor, %x
%or = or i32 %and1, %and2
%inc = add i32 %or, 1

define i32 @or_and_rhs_neg_and_i32(i32 %x, i32 %y, i32 %z) {
; X86-LABEL: or_and_rhs_neg_and_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: incl %eax
; NOBMI-LABEL: or_and_rhs_neg_and_i32:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: # kill: def $edi killed $edi def $rdi
; NOBMI-NEXT: xorl %edx, %edi
; NOBMI-NEXT: andl %esi, %edi
; NOBMI-NEXT: xorl %edx, %edi
; NOBMI-NEXT: leal 1(%rdi), %eax
; BMI-LABEL: or_and_rhs_neg_and_i32:
; BMI: # %bb.0: # %entry
; BMI-NEXT: # kill: def $edi killed $edi def $rdi
; BMI-NEXT: andnl %edx, %esi, %eax
; BMI-NEXT: andl %esi, %edi
; BMI-NEXT: leal 1(%rax,%rdi), %eax
%xor = xor i32 %y, -1
%and1 = and i32 %z, %xor
%and2 = and i32 %x, %y
%or = or i32 %and1, %and2
%inc = add i32 %or, 1

define i32 @or_and_lhs_neg_and_i32(i32 %x, i32 %y, i32 %z) {
; X86-LABEL: or_and_lhs_neg_and_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: incl %eax
; NOBMI-LABEL: or_and_lhs_neg_and_i32:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: # kill: def $edi killed $edi def $rdi
; NOBMI-NEXT: xorl %edx, %edi
; NOBMI-NEXT: andl %esi, %edi
; NOBMI-NEXT: xorl %edx, %edi
; NOBMI-NEXT: leal 1(%rdi), %eax
; BMI-LABEL: or_and_lhs_neg_and_i32:
; BMI: # %bb.0: # %entry
; BMI-NEXT: # kill: def $edi killed $edi def $rdi
; BMI-NEXT: andnl %edx, %esi, %eax
; BMI-NEXT: andl %esi, %edi
; BMI-NEXT: leal 1(%rax,%rdi), %eax
%xor = xor i32 %y, -1
%and1 = and i32 %xor, %z
%and2 = and i32 %x, %y
%or = or i32 %and1, %and2
%inc = add i32 %or, 1
define i64 @or_and_and_rhs_neg_i64(i64 %x, i64 %y, i64 %z) {
; X86-LABEL: or_and_and_rhs_neg_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: andl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: addl $1, %eax
; X86-NEXT: adcl $0, %edx
; NOBMI-LABEL: or_and_and_rhs_neg_i64:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: xorq %rdi, %rdx
; NOBMI-NEXT: andq %rsi, %rdx
; NOBMI-NEXT: xorq %rdi, %rdx
; NOBMI-NEXT: leaq 1(%rdx), %rax
; BMI-LABEL: or_and_and_rhs_neg_i64:
; BMI: # %bb.0: # %entry
; BMI-NEXT: andq %rsi, %rdx
; BMI-NEXT: andnq %rdi, %rsi, %rax
; BMI-NEXT: leaq 1(%rdx,%rax), %rax
%and1 = and i64 %z, %y
%xor = xor i64 %y, -1
%and2 = and i64 %x, %xor
%or = or i64 %and1, %and2
%inc = add i64 %or, 1

define i64 @or_and_and_lhs_neg_i64(i64 %x, i64 %y, i64 %z) {
; X86-LABEL: or_and_and_lhs_neg_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: andl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: addl $1, %eax
; X86-NEXT: adcl $0, %edx
; NOBMI-LABEL: or_and_and_lhs_neg_i64:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: xorq %rdi, %rdx
; NOBMI-NEXT: andq %rsi, %rdx
; NOBMI-NEXT: xorq %rdi, %rdx
; NOBMI-NEXT: leaq 1(%rdx), %rax
; BMI-LABEL: or_and_and_lhs_neg_i64:
; BMI: # %bb.0: # %entry
; BMI-NEXT: andq %rsi, %rdx
; BMI-NEXT: andnq %rdi, %rsi, %rax
; BMI-NEXT: leaq 1(%rdx,%rax), %rax
%and1 = and i64 %z, %y
%xor = xor i64 %y, -1
%and2 = and i64 %xor, %x
%or = or i64 %and1, %and2
%inc = add i64 %or, 1

define i64 @or_and_rhs_neg_and_i64(i64 %x, i64 %y, i64 %z) {
; X86-LABEL: or_and_rhs_neg_and_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: andl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: addl $1, %eax
; X86-NEXT: adcl $0, %edx
; NOBMI-LABEL: or_and_rhs_neg_and_i64:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: xorq %rdx, %rdi
; NOBMI-NEXT: andq %rsi, %rdi
; NOBMI-NEXT: xorq %rdx, %rdi
; NOBMI-NEXT: leaq 1(%rdi), %rax
; BMI-LABEL: or_and_rhs_neg_and_i64:
; BMI: # %bb.0: # %entry
; BMI-NEXT: andnq %rdx, %rsi, %rax
; BMI-NEXT: andq %rsi, %rdi
; BMI-NEXT: leaq 1(%rax,%rdi), %rax
%xor = xor i64 %y, -1
%and1 = and i64 %z, %xor
%and2 = and i64 %x, %y
%or = or i64 %and1, %and2
%inc = add i64 %or, 1

define i64 @or_and_lhs_neg_and_i64(i64 %x, i64 %y, i64 %z) {
; X86-LABEL: or_and_lhs_neg_and_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: andl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %eax, %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: addl $1, %eax
; X86-NEXT: adcl $0, %edx
; NOBMI-LABEL: or_and_lhs_neg_and_i64:
; NOBMI: # %bb.0: # %entry
; NOBMI-NEXT: xorq %rdx, %rdi
; NOBMI-NEXT: andq %rsi, %rdi
; NOBMI-NEXT: xorq %rdx, %rdi
; NOBMI-NEXT: leaq 1(%rdi), %rax
; BMI-LABEL: or_and_lhs_neg_and_i64:
; BMI: # %bb.0: # %entry
; BMI-NEXT: andnq %rdx, %rsi, %rax
; BMI-NEXT: andq %rsi, %rdi
; BMI-NEXT: leaq 1(%rax,%rdi), %rax
%xor = xor i64 %y, -1
%and1 = and i64 %xor, %z
%and2 = and i64 %x, %y
%or = or i64 %and1, %and2
%inc = add i64 %or, 1
define i32 @or_sext1(i32 %x) {
; X86-LABEL: or_sext1:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: cmpl $43, {{[0-9]+}}(%esp)
; X86-NEXT: leal -1(%eax,%eax), %eax
; X64-LABEL: or_sext1:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl $43, %edi
; X64-NEXT: leal -1(%rax,%rax), %eax
%cmp = icmp sgt i32 %x, 42
%sext = sext i1 %cmp to i32
%or = or i32 %sext, 1

define i64 @or_sext1_64(i64 %x) {
; X86-LABEL: or_sext1_64:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: movl $42, %ecx
; X86-NEXT: cmpl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl %al, %edx
; X86-NEXT: negl %edx
; X86-NEXT: movl %edx, %eax
; X86-NEXT: orl $1, %eax
; X64-LABEL: or_sext1_64:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq $43, %rdi
; X64-NEXT: leaq -1(%rax,%rax), %rax
%cmp = icmp sgt i64 %x, 42
%sext = sext i1 %cmp to i64
%or = or i64 %sext, 1

define i32 @or_sext2(i32 %x) {
; X86-LABEL: or_sext2:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: cmpl $43, {{[0-9]+}}(%esp)
; X86-NEXT: leal -1(%eax,%eax,2), %eax
; X64-LABEL: or_sext2:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl $43, %edi
; X64-NEXT: leal -1(%rax,%rax,2), %eax
%cmp = icmp sgt i32 %x, 42
%sext = sext i1 %cmp to i32
%or = or i32 %sext, 2

define i64 @or_sext2_64(i64 %x) {
; X86-LABEL: or_sext2_64:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: movl $42, %ecx
; X86-NEXT: cmpl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl %al, %edx
; X86-NEXT: negl %edx
; X86-NEXT: movl %edx, %eax
; X86-NEXT: orl $2, %eax
; X64-LABEL: or_sext2_64:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq $43, %rdi
; X64-NEXT: leaq -1(%rax,%rax,2), %rax
%cmp = icmp sgt i64 %x, 42
%sext = sext i1 %cmp to i64
%or = or i64 %sext, 2

define i32 @or_sext3(i32 %x) {
; X86-LABEL: or_sext3:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: cmpl $43, {{[0-9]+}}(%esp)
; X86-NEXT: leal -1(,%eax,4), %eax
; X64-LABEL: or_sext3:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl $43, %edi
; X64-NEXT: leal -1(,%rax,4), %eax
%cmp = icmp sgt i32 %x, 42
%sext = sext i1 %cmp to i32
%or = or i32 %sext, 3

define i64 @or_sext3_64(i64 %x) {
; X86-LABEL: or_sext3_64:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: movl $42, %ecx
; X86-NEXT: cmpl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl %al, %edx
; X86-NEXT: negl %edx
; X86-NEXT: movl %edx, %eax
; X86-NEXT: orl $3, %eax
; X64-LABEL: or_sext3_64:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq $43, %rdi
; X64-NEXT: leaq -1(,%rax,4), %rax
%cmp = icmp sgt i64 %x, 42
%sext = sext i1 %cmp to i64
%or = or i64 %sext, 3

define i32 @or_sext4(i32 %x) {
; X86-LABEL: or_sext4:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: cmpl $43, {{[0-9]+}}(%esp)
; X86-NEXT: leal -1(%eax,%eax,4), %eax
; X64-LABEL: or_sext4:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl $43, %edi
; X64-NEXT: leal -1(%rax,%rax,4), %eax
%cmp = icmp sgt i32 %x, 42
%sext = sext i1 %cmp to i32
%or = or i32 %sext, 4

define i64 @or_sext4_64(i64 %x) {
; X86-LABEL: or_sext4_64:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: movl $42, %ecx
; X86-NEXT: cmpl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl %al, %edx
; X86-NEXT: negl %edx
; X86-NEXT: movl %edx, %eax
; X86-NEXT: orl $4, %eax
; X64-LABEL: or_sext4_64:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq $43, %rdi
; X64-NEXT: leaq -1(%rax,%rax,4), %rax
%cmp = icmp sgt i64 %x, 42
%sext = sext i1 %cmp to i64
%or = or i64 %sext, 4
define i32 @or_sext7(i32 %x) {
; X86-LABEL: or_sext7:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: cmpl $43, {{[0-9]+}}(%esp)
; X86-NEXT: leal -1(,%eax,8), %eax
; X64-LABEL: or_sext7:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl $43, %edi
; X64-NEXT: leal -1(,%rax,8), %eax
%cmp = icmp sgt i32 %x, 42
%sext = sext i1 %cmp to i32
%or = or i32 %sext, 7

define i64 @or_sext7_64(i64 %x) {
; X86-LABEL: or_sext7_64:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: movl $42, %ecx
; X86-NEXT: cmpl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl %al, %edx
; X86-NEXT: negl %edx
; X86-NEXT: movl %edx, %eax
; X86-NEXT: orl $7, %eax
; X64-LABEL: or_sext7_64:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq $43, %rdi
; X64-NEXT: leaq -1(,%rax,8), %rax
%cmp = icmp sgt i64 %x, 42
%sext = sext i1 %cmp to i64
%or = or i64 %sext, 7

define i32 @or_sext8(i32 %x) {
; X86-LABEL: or_sext8:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: cmpl $43, {{[0-9]+}}(%esp)
; X86-NEXT: leal -1(%eax,%eax,8), %eax
; X64-LABEL: or_sext8:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl $43, %edi
; X64-NEXT: leal -1(%rax,%rax,8), %eax
%cmp = icmp sgt i32 %x, 42
%sext = sext i1 %cmp to i32
%or = or i32 %sext, 8

define i64 @or_sext8_64(i64 %x) {
; X86-LABEL: or_sext8_64:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: movl $42, %ecx
; X86-NEXT: cmpl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl %al, %edx
; X86-NEXT: negl %edx
; X86-NEXT: movl %edx, %eax
; X86-NEXT: orl $8, %eax
; X64-LABEL: or_sext8_64:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq $43, %rdi
; X64-NEXT: leaq -1(%rax,%rax,8), %rax
%cmp = icmp sgt i64 %x, 42
%sext = sext i1 %cmp to i64
%or = or i64 %sext, 8
define i64 @or_large_constant(i64 %x) {
; X86-LABEL: or_large_constant:
; X86: # %bb.0: # %entry
; X86-NEXT: xorl %edx, %edx
; X86-NEXT: movl $1, %eax
; X86-NEXT: cmpl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl $0, %eax
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl %al, %eax
; X86-NEXT: negl %eax
; X86-NEXT: sbbl %edx, %edx
; X86-NEXT: orl $1, %eax
; X86-NEXT: orl $128, %edx
; X64-LABEL: or_large_constant:
; X64: # %bb.0: # %entry
; X64-NEXT: xorl %ecx, %ecx
; X64-NEXT: cmpq $2, %rdi
; X64-NEXT: setge %cl
; X64-NEXT: negq %rcx
; X64-NEXT: movabsq $549755813889, %rax # imm = 0x8000000001
; X64-NEXT: orq %rcx, %rax
%cmp = icmp sgt i64 %x, 1
%zext = zext i1 %cmp to i64
%sub = sub i64 0, %zext
%or = or i64 %sub, 549755813889 ; 0x8000000001