1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+bmi2 | FileCheck %s --check-prefixes=CHECK
3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+bmi2,egpr --show-mc-encoding | FileCheck %s --check-prefixes=EGPR
; bzhi64: reg-reg form of the BZHI intrinsic. Expect a single bzhiq with the
; index in %rsi, source in %rdi, result in %rax (per the CHECK/EGPR lines).
5 define i64 @bzhi64(i64 %x, i64 %y) {
8 ; CHECK-NEXT: bzhiq %rsi, %rdi, %rax
13 ; EGPR-NEXT: bzhiq %rsi, %rdi, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc8,0xf5,0xc7]
14 ; EGPR-NEXT: retq # encoding: [0xc3]
15 %tmp = tail call i64 @llvm.x86.bmi.bzhi.64(i64 %x, i64 %y)
; bzhi64_load: same as bzhi64 but the first operand comes from memory — the
; load should fold into the instruction as bzhiq with a (%rdi) memory operand.
19 define i64 @bzhi64_load(ptr %x, i64 %y) {
20 ; CHECK-LABEL: bzhi64_load:
22 ; CHECK-NEXT: bzhiq %rsi, (%rdi), %rax
25 ; EGPR-LABEL: bzhi64_load:
27 ; EGPR-NEXT: bzhiq %rsi, (%rdi), %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc8,0xf5,0x07]
28 ; EGPR-NEXT: retq # encoding: [0xc3]
29 %x1 = load i64, ptr %x
30 %tmp = tail call i64 @llvm.x86.bmi.bzhi.64(i64 %x1, i64 %y)
; Intrinsic declaration used by the bzhi64* tests above.
34 declare i64 @llvm.x86.bmi.bzhi.64(i64, i64)
; pdep64: reg-reg form of the PDEP intrinsic — expect a single pdepq.
36 define i64 @pdep64(i64 %x, i64 %y) {
37 ; CHECK-LABEL: pdep64:
39 ; CHECK-NEXT: pdepq %rsi, %rdi, %rax
44 ; EGPR-NEXT: pdepq %rsi, %rdi, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc3,0xf5,0xc6]
45 ; EGPR-NEXT: retq # encoding: [0xc3]
46 %tmp = tail call i64 @llvm.x86.bmi.pdep.64(i64 %x, i64 %y)
; pdep64_load: the mask operand is loaded from memory; the load should fold
; into pdepq as a (%rsi) memory operand.
50 define i64 @pdep64_load(i64 %x, ptr %y) {
51 ; CHECK-LABEL: pdep64_load:
53 ; CHECK-NEXT: pdepq (%rsi), %rdi, %rax
56 ; EGPR-LABEL: pdep64_load:
58 ; EGPR-NEXT: pdepq (%rsi), %rdi, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc3,0xf5,0x06]
59 ; EGPR-NEXT: retq # encoding: [0xc3]
60 %y1 = load i64, ptr %y
61 %tmp = tail call i64 @llvm.x86.bmi.pdep.64(i64 %x, i64 %y1)
; pdep64_anyext: i32 input widened to i64 (via sext here) and pdep'd with the
; 0x5555... constant mask. The expected asm keeps %edi's implicit extension
; (the "kill" comment) rather than emitting an explicit extend, loading the
; mask with movabsq before pdepq.
; NOTE(review): the name says "anyext" but the IR uses sext — presumably the
; point is that the extension is elided anyway; confirm against the 32-bit
; variant of this test.
65 define i64 @pdep64_anyext(i32 %x) {
66 ; CHECK-LABEL: pdep64_anyext:
68 ; CHECK-NEXT: # kill: def $edi killed $edi def $rdi
69 ; CHECK-NEXT: movabsq $6148914691236517205, %rax # imm = 0x5555555555555555
70 ; CHECK-NEXT: pdepq %rax, %rdi, %rax
73 ; EGPR-LABEL: pdep64_anyext:
75 ; EGPR-NEXT: # kill: def $edi killed $edi def $rdi
76 ; EGPR-NEXT: movabsq $6148914691236517205, %rax # encoding: [0x48,0xb8,0x55,0x55,0x55,0x55,0x55,0x55,0x55,0x55]
77 ; EGPR-NEXT: # imm = 0x5555555555555555
78 ; EGPR-NEXT: pdepq %rax, %rdi, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc3,0xf5,0xc0]
79 ; EGPR-NEXT: retq # encoding: [0xc3]
80 %x1 = sext i32 %x to i64
81 %tmp = tail call i64 @llvm.x86.bmi.pdep.64(i64 %x1, i64 6148914691236517205)
; Intrinsic declaration used by the pdep64* tests above.
85 declare i64 @llvm.x86.bmi.pdep.64(i64, i64)
; pext64: reg-reg form of the PEXT intrinsic — expect a single pextq.
87 define i64 @pext64(i64 %x, i64 %y) {
88 ; CHECK-LABEL: pext64:
90 ; CHECK-NEXT: pextq %rsi, %rdi, %rax
95 ; EGPR-NEXT: pextq %rsi, %rdi, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc2,0xf5,0xc6]
96 ; EGPR-NEXT: retq # encoding: [0xc3]
97 %tmp = tail call i64 @llvm.x86.bmi.pext.64(i64 %x, i64 %y)
; pext64_load: the mask operand is loaded from memory; the load should fold
; into pextq as a (%rsi) memory operand.
101 define i64 @pext64_load(i64 %x, ptr %y) {
102 ; CHECK-LABEL: pext64_load:
104 ; CHECK-NEXT: pextq (%rsi), %rdi, %rax
107 ; EGPR-LABEL: pext64_load:
109 ; EGPR-NEXT: pextq (%rsi), %rdi, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc2,0xf5,0x06]
110 ; EGPR-NEXT: retq # encoding: [0xc3]
111 %y1 = load i64, ptr %y
112 %tmp = tail call i64 @llvm.x86.bmi.pext.64(i64 %x, i64 %y1)
; pext64_knownbits: pext with the 0x5555... mask followed by `and` with
; 0xffffffff. The expected asm contains no and/mov32 after pextq — i.e. known
; bits of the pext result let the masking be optimized away entirely.
116 define i64 @pext64_knownbits(i64 %x, i64 %y) {
117 ; CHECK-LABEL: pext64_knownbits:
119 ; CHECK-NEXT: movabsq $6148914691236517205, %rax # imm = 0x5555555555555555
120 ; CHECK-NEXT: pextq %rax, %rdi, %rax
123 ; EGPR-LABEL: pext64_knownbits:
125 ; EGPR-NEXT: movabsq $6148914691236517205, %rax # encoding: [0x48,0xb8,0x55,0x55,0x55,0x55,0x55,0x55,0x55,0x55]
126 ; EGPR-NEXT: # imm = 0x5555555555555555
127 ; EGPR-NEXT: pextq %rax, %rdi, %rax # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xc2,0xf5,0xc0]
128 ; EGPR-NEXT: retq # encoding: [0xc3]
129 %tmp = tail call i64 @llvm.x86.bmi.pext.64(i64 %x, i64 6148914691236517205)
130 %tmp2 = and i64 %tmp, 4294967295
; Intrinsic declaration used by the pext64* tests above.
134 declare i64 @llvm.x86.bmi.pext.64(i64, i64)
; mulx64: 64x64->128-bit widening multiply expressed as zext+mul+lshr/trunc.
; Expect lowering to mulxq: the implicit source must be staged through %rdx
; (hence the movq shuffles), producing low half in %rax and high half in %rdx,
; with the high half stored through the pointer (movq %rdx, (%rcx)).
136 define i64 @mulx64(i64 %x, i64 %y, ptr %p) {
137 ; CHECK-LABEL: mulx64:
139 ; CHECK-NEXT: movq %rdx, %rcx
140 ; CHECK-NEXT: movq %rdi, %rdx
141 ; CHECK-NEXT: mulxq %rsi, %rax, %rdx
142 ; CHECK-NEXT: movq %rdx, (%rcx)
145 ; EGPR-LABEL: mulx64:
147 ; EGPR-NEXT: movq %rdx, %rcx # encoding: [0x48,0x89,0xd1]
148 ; EGPR-NEXT: movq %rdi, %rdx # encoding: [0x48,0x89,0xfa]
149 ; EGPR-NEXT: mulxq %rsi, %rax, %rdx # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xfb,0xf6,0xd6]
150 ; EGPR-NEXT: movq %rdx, (%rcx) # encoding: [0x48,0x89,0x11]
151 ; EGPR-NEXT: retq # encoding: [0xc3]
152 %x1 = zext i64 %x to i128
153 %y1 = zext i64 %y to i128
154 %r1 = mul i128 %x1, %y1
; %h = high 64 bits of the product, %l = low 64 bits; presumably %h is stored
; to %p and %l returned in the (not shown) tail of this function.
155 %h1 = lshr i128 %r1, 64
156 %h = trunc i128 %h1 to i64
157 %l = trunc i128 %r1 to i64
; mulx64_load: same widening multiply as mulx64 but with one operand loaded
; from memory — the load should fold into mulxq as a (%rsi) memory operand.
162 define i64 @mulx64_load(i64 %x, ptr %y, ptr %p) {
163 ; CHECK-LABEL: mulx64_load:
165 ; CHECK-NEXT: movq %rdx, %rcx
166 ; CHECK-NEXT: movq %rdi, %rdx
167 ; CHECK-NEXT: mulxq (%rsi), %rax, %rdx
168 ; CHECK-NEXT: movq %rdx, (%rcx)
171 ; EGPR-LABEL: mulx64_load:
173 ; EGPR-NEXT: movq %rdx, %rcx # encoding: [0x48,0x89,0xd1]
174 ; EGPR-NEXT: movq %rdi, %rdx # encoding: [0x48,0x89,0xfa]
175 ; EGPR-NEXT: mulxq (%rsi), %rax, %rdx # EVEX TO VEX Compression encoding: [0xc4,0xe2,0xfb,0xf6,0x16]
176 ; EGPR-NEXT: movq %rdx, (%rcx) # encoding: [0x48,0x89,0x11]
177 ; EGPR-NEXT: retq # encoding: [0xc3]
178 %y1 = load i64, ptr %y
179 %x2 = zext i64 %x to i128
180 %y2 = zext i64 %y1 to i128
181 %r1 = mul i128 %x2, %y2
; %h = high 64 bits, %l = low 64 bits of the product; the store of %h to %p
; and the return of %l are presumably in the (not shown) tail.
182 %h1 = lshr i128 %r1, 64
183 %h = trunc i128 %h1 to i64
184 %l = trunc i128 %r1 to i64