; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -fast-isel -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefix=X64

; NOTE: This should use IR equivalent to what is generated by clang/test/CodeGen/bmi-builtins.c

;
; AMD Intrinsics
;

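; __andn_u64: and-not, i.e. ~a0 & a1 (the ANDN idiom).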
define i64 @test__andn_u64(i64 %a0, i64 %a1) {
; X64-LABEL: test__andn_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    andq %rsi, %rax
; X64-NEXT:    retq
  %xor = xor i64 %a0, -1
  %res = and i64 %xor, %a1
  ret i64 %res
}

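; __bextr_u64: bit field extract; operand 1 packs the start bit (bits 7:0) and length (bits 15:8).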
define i64 @test__bextr_u64(i64 %a0, i64 %a1) {
; X64-LABEL: test__bextr_u64:
; X64:       # %bb.0:
; X64-NEXT:    bextrq %rsi, %rdi, %rax
; X64-NEXT:    retq
  %res = call i64 @llvm.x86.bmi.bextr.64(i64 %a0, i64 %a1)
  ret i64 %res
}

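; __blsi_u64: a0 & -a0, isolates the lowest set bit (the BLSI idiom).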
define i64 @test__blsi_u64(i64 %a0) {
; X64-LABEL: test__blsi_u64:
; X64:       # %bb.0:
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    subq %rdi, %rax
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    retq
  %neg = sub i64 0, %a0
  %res = and i64 %a0, %neg
  ret i64 %res
}

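; __blsmsk_u64: a0 ^ (a0 - 1), mask up to and including the lowest set bit (the BLSMSK idiom).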
define i64 @test__blsmsk_u64(i64 %a0) {
; X64-LABEL: test__blsmsk_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subq $1, %rax
; X64-NEXT:    xorq %rdi, %rax
; X64-NEXT:    retq
  %dec = sub i64 %a0, 1
  %res = xor i64 %a0, %dec
  ret i64 %res
}

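; __blsr_u64: a0 & (a0 - 1), clears the lowest set bit (the BLSR idiom).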
define i64 @test__blsr_u64(i64 %a0) {
; X64-LABEL: test__blsr_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subq $1, %rax
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    retq
  %dec = sub i64 %a0, 1
  %res = and i64 %a0, %dec
  ret i64 %res
}

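; __tzcnt_u64: trailing zero count via llvm.cttz.i64.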
define i64 @test__tzcnt_u64(i64 %a0) {
; X64-LABEL: test__tzcnt_u64:
; X64:       # %bb.0:
; X64-NEXT:    tzcntq %rdi, %rax
; X64-NEXT:    retq
  %cmp = icmp ne i64 %a0, 0
  %cttz = call i64 @llvm.cttz.i64(i64 %a0, i1 false)
  ret i64 %cttz
}

;
; Intel intrinsics
;

define i64 @test_andn_u64(i64 %a0, i64 %a1) {
; X64-LABEL: test_andn_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    andq %rsi, %rax
; X64-NEXT:    retq
  %xor = xor i64 %a0, -1
  %res = and i64 %xor, %a1
  ret i64 %res
}

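; _bextr_u64 (Intel form): start and length are passed separately; the low byte of each is packed into the BEXTR control value.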
define i64 @test_bextr_u64(i64 %a0, i32 %a1, i32 %a2) {
; X64-LABEL: test_bextr_u64:
; X64:       # %bb.0:
; X64-NEXT:    andl $255, %esi
; X64-NEXT:    andl $255, %edx
; X64-NEXT:    shll $8, %edx
; X64-NEXT:    orl %esi, %edx
; X64-NEXT:    movl %edx, %eax
; X64-NEXT:    bextrq %rax, %rdi, %rax
; X64-NEXT:    retq
  %and1 = and i32 %a1, 255
  %and2 = and i32 %a2, 255
  %shl = shl i32 %and2, 8
  %or = or i32 %and1, %shl
  %zext = zext i32 %or to i64
  %res = call i64 @llvm.x86.bmi.bextr.64(i64 %a0, i64 %zext)
  ret i64 %res
}

define i64 @test_blsi_u64(i64 %a0) {
; X64-LABEL: test_blsi_u64:
; X64:       # %bb.0:
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    subq %rdi, %rax
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    retq
  %neg = sub i64 0, %a0
  %res = and i64 %a0, %neg
  ret i64 %res
}

define i64 @test_blsmsk_u64(i64 %a0) {
; X64-LABEL: test_blsmsk_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subq $1, %rax
; X64-NEXT:    xorq %rdi, %rax
; X64-NEXT:    retq
  %dec = sub i64 %a0, 1
  %res = xor i64 %a0, %dec
  ret i64 %res
}

define i64 @test_blsr_u64(i64 %a0) {
; X64-LABEL: test_blsr_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subq $1, %rax
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    retq
  %dec = sub i64 %a0, 1
  %res = and i64 %a0, %dec
  ret i64 %res
}

define i64 @test_tzcnt_u64(i64 %a0) {
; X64-LABEL: test_tzcnt_u64:
; X64:       # %bb.0:
; X64-NEXT:    tzcntq %rdi, %rax
; X64-NEXT:    retq
  %cmp = icmp ne i64 %a0, 0
  %cttz = call i64 @llvm.cttz.i64(i64 %a0, i1 false)
  ret i64 %cttz
}

declare i64 @llvm.cttz.i64(i64, i1)
declare i64 @llvm.x86.bmi.bextr.64(i64, i64)