1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefixes=CHECK,X86
3 ; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefixes=CHECK,X64
4 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+xop | FileCheck %s --check-prefixes=CHECK,X86XOP
5 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512bw,+avx512vl,+gfni | FileCheck %s --check-prefixes=CHECK,GFNI
7 ; These tests just check that the plumbing is in place for @llvm.bitreverse. The
8 ; actual output is massive at the moment as llvm.bitreverse is not yet legal.
10 declare <2 x i16> @llvm.bitreverse.v2i16(<2 x i16>) readnone
12 define <2 x i16> @test_bitreverse_v2i16(<2 x i16> %a) nounwind {
13 ; X86-LABEL: test_bitreverse_v2i16:
15 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
16 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
17 ; X86-NEXT: rolw $8, %ax
18 ; X86-NEXT: movl %eax, %edx
19 ; X86-NEXT: andl $3855, %edx # imm = 0xF0F
20 ; X86-NEXT: shll $4, %edx
21 ; X86-NEXT: shrl $4, %eax
22 ; X86-NEXT: andl $3855, %eax # imm = 0xF0F
23 ; X86-NEXT: orl %edx, %eax
24 ; X86-NEXT: movl %eax, %edx
25 ; X86-NEXT: andl $13107, %edx # imm = 0x3333
26 ; X86-NEXT: shrl $2, %eax
27 ; X86-NEXT: andl $13107, %eax # imm = 0x3333
28 ; X86-NEXT: leal (%eax,%edx,4), %eax
29 ; X86-NEXT: movl %eax, %edx
30 ; X86-NEXT: andl $21845, %edx # imm = 0x5555
31 ; X86-NEXT: shrl %eax
32 ; X86-NEXT: andl $21845, %eax # imm = 0x5555
33 ; X86-NEXT: leal (%eax,%edx,2), %eax
34 ; X86-NEXT: rolw $8, %cx
35 ; X86-NEXT: movl %ecx, %edx
36 ; X86-NEXT: andl $3855, %edx # imm = 0xF0F
37 ; X86-NEXT: shll $4, %edx
38 ; X86-NEXT: shrl $4, %ecx
39 ; X86-NEXT: andl $3855, %ecx # imm = 0xF0F
40 ; X86-NEXT: orl %edx, %ecx
41 ; X86-NEXT: movl %ecx, %edx
42 ; X86-NEXT: andl $13107, %edx # imm = 0x3333
43 ; X86-NEXT: shrl $2, %ecx
44 ; X86-NEXT: andl $13107, %ecx # imm = 0x3333
45 ; X86-NEXT: leal (%ecx,%edx,4), %ecx
46 ; X86-NEXT: movl %ecx, %edx
47 ; X86-NEXT: andl $21845, %edx # imm = 0x5555
48 ; X86-NEXT: shrl %ecx
49 ; X86-NEXT: andl $21845, %ecx # imm = 0x5555
50 ; X86-NEXT: leal (%ecx,%edx,2), %edx
51 ; X86-NEXT: # kill: def $ax killed $ax killed $eax
52 ; X86-NEXT: # kill: def $dx killed $dx killed $edx
55 ; X64-LABEL: test_bitreverse_v2i16:
57 ; X64-NEXT: movdqa %xmm0, %xmm1
58 ; X64-NEXT: psrlw $8, %xmm1
59 ; X64-NEXT: psllw $8, %xmm0
60 ; X64-NEXT: por %xmm1, %xmm0
61 ; X64-NEXT: movdqa %xmm0, %xmm1
62 ; X64-NEXT: psrlw $4, %xmm1
63 ; X64-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
64 ; X64-NEXT: pand %xmm2, %xmm1
65 ; X64-NEXT: pand %xmm2, %xmm0
66 ; X64-NEXT: psllw $4, %xmm0
67 ; X64-NEXT: por %xmm1, %xmm0
68 ; X64-NEXT: movdqa %xmm0, %xmm1
69 ; X64-NEXT: psrlw $2, %xmm1
70 ; X64-NEXT: movdqa {{.*#+}} xmm2 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51]
71 ; X64-NEXT: pand %xmm2, %xmm1
72 ; X64-NEXT: pand %xmm2, %xmm0
73 ; X64-NEXT: psllw $2, %xmm0
74 ; X64-NEXT: por %xmm1, %xmm0
75 ; X64-NEXT: movdqa %xmm0, %xmm1
76 ; X64-NEXT: psrlw $1, %xmm1
77 ; X64-NEXT: movdqa {{.*#+}} xmm2 = [85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85]
78 ; X64-NEXT: pand %xmm2, %xmm1
79 ; X64-NEXT: pand %xmm2, %xmm0
80 ; X64-NEXT: paddb %xmm0, %xmm0
81 ; X64-NEXT: por %xmm1, %xmm0
84 ; X86XOP-LABEL: test_bitreverse_v2i16:
86 ; X86XOP-NEXT: vpperm {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0, %xmm0
89 ; GFNI-LABEL: test_bitreverse_v2i16:
91 ; GFNI-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,0,3,2,5,4,7,6,9,8,11,10,13,12,15,14]
92 ; GFNI-NEXT: vgf2p8affineqb $0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
94 %b = call <2 x i16> @llvm.bitreverse.v2i16(<2 x i16> %a)
98 declare i64 @llvm.bitreverse.i64(i64) readnone
100 define i64 @test_bitreverse_i64(i64 %a) nounwind {
101 ; X86-LABEL: test_bitreverse_i64:
103 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
104 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
105 ; X86-NEXT: bswapl %eax
106 ; X86-NEXT: movl %eax, %edx
107 ; X86-NEXT: andl $252645135, %edx # imm = 0xF0F0F0F
108 ; X86-NEXT: shll $4, %edx
109 ; X86-NEXT: shrl $4, %eax
110 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
111 ; X86-NEXT: orl %edx, %eax
112 ; X86-NEXT: movl %eax, %edx
113 ; X86-NEXT: andl $858993459, %edx # imm = 0x33333333
114 ; X86-NEXT: shrl $2, %eax
115 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
116 ; X86-NEXT: leal (%eax,%edx,4), %eax
117 ; X86-NEXT: movl %eax, %edx
118 ; X86-NEXT: andl $1431655765, %edx # imm = 0x55555555
119 ; X86-NEXT: shrl %eax
120 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
121 ; X86-NEXT: leal (%eax,%edx,2), %eax
122 ; X86-NEXT: bswapl %ecx
123 ; X86-NEXT: movl %ecx, %edx
124 ; X86-NEXT: andl $252645135, %edx # imm = 0xF0F0F0F
125 ; X86-NEXT: shll $4, %edx
126 ; X86-NEXT: shrl $4, %ecx
127 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
128 ; X86-NEXT: orl %edx, %ecx
129 ; X86-NEXT: movl %ecx, %edx
130 ; X86-NEXT: andl $858993459, %edx # imm = 0x33333333
131 ; X86-NEXT: shrl $2, %ecx
132 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
133 ; X86-NEXT: leal (%ecx,%edx,4), %ecx
134 ; X86-NEXT: movl %ecx, %edx
135 ; X86-NEXT: andl $1431655765, %edx # imm = 0x55555555
136 ; X86-NEXT: shrl %ecx
137 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
138 ; X86-NEXT: leal (%ecx,%edx,2), %edx
141 ; X64-LABEL: test_bitreverse_i64:
143 ; X64-NEXT: bswapq %rdi
144 ; X64-NEXT: movq %rdi, %rax
145 ; X64-NEXT: shrq $4, %rax
146 ; X64-NEXT: movabsq $1085102592571150095, %rcx # imm = 0xF0F0F0F0F0F0F0F
147 ; X64-NEXT: andq %rcx, %rax
148 ; X64-NEXT: andq %rcx, %rdi
149 ; X64-NEXT: shlq $4, %rdi
150 ; X64-NEXT: orq %rax, %rdi
151 ; X64-NEXT: movabsq $3689348814741910323, %rax # imm = 0x3333333333333333
152 ; X64-NEXT: movq %rdi, %rcx
153 ; X64-NEXT: andq %rax, %rcx
154 ; X64-NEXT: shrq $2, %rdi
155 ; X64-NEXT: andq %rax, %rdi
156 ; X64-NEXT: leaq (%rdi,%rcx,4), %rax
157 ; X64-NEXT: movabsq $6148914691236517205, %rcx # imm = 0x5555555555555555
158 ; X64-NEXT: movq %rax, %rdx
159 ; X64-NEXT: andq %rcx, %rdx
160 ; X64-NEXT: shrq %rax
161 ; X64-NEXT: andq %rcx, %rax
162 ; X64-NEXT: leaq (%rax,%rdx,2), %rax
165 ; X86XOP-LABEL: test_bitreverse_i64:
167 ; X86XOP-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
168 ; X86XOP-NEXT: vpperm {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0, %xmm0
169 ; X86XOP-NEXT: vmovd %xmm0, %eax
170 ; X86XOP-NEXT: vpextrd $1, %xmm0, %edx
173 ; GFNI-LABEL: test_bitreverse_i64:
175 ; GFNI-NEXT: bswapq %rdi
176 ; GFNI-NEXT: movq %rdi, %rax
177 ; GFNI-NEXT: shrq $4, %rax
178 ; GFNI-NEXT: movabsq $1085102592571150095, %rcx # imm = 0xF0F0F0F0F0F0F0F
179 ; GFNI-NEXT: andq %rcx, %rax
180 ; GFNI-NEXT: andq %rcx, %rdi
181 ; GFNI-NEXT: shlq $4, %rdi
182 ; GFNI-NEXT: orq %rax, %rdi
183 ; GFNI-NEXT: movabsq $3689348814741910323, %rax # imm = 0x3333333333333333
184 ; GFNI-NEXT: movq %rdi, %rcx
185 ; GFNI-NEXT: andq %rax, %rcx
186 ; GFNI-NEXT: shrq $2, %rdi
187 ; GFNI-NEXT: andq %rax, %rdi
188 ; GFNI-NEXT: leaq (%rdi,%rcx,4), %rax
189 ; GFNI-NEXT: movabsq $6148914691236517205, %rcx # imm = 0x5555555555555555
190 ; GFNI-NEXT: movq %rax, %rdx
191 ; GFNI-NEXT: andq %rcx, %rdx
192 ; GFNI-NEXT: shrq %rax
193 ; GFNI-NEXT: andq %rcx, %rax
194 ; GFNI-NEXT: leaq (%rax,%rdx,2), %rax
196 %b = call i64 @llvm.bitreverse.i64(i64 %a)
200 declare i32 @llvm.bitreverse.i32(i32) readnone
202 define i32 @test_bitreverse_i32(i32 %a) nounwind {
203 ; X86-LABEL: test_bitreverse_i32:
205 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
206 ; X86-NEXT: bswapl %eax
207 ; X86-NEXT: movl %eax, %ecx
208 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
209 ; X86-NEXT: shll $4, %ecx
210 ; X86-NEXT: shrl $4, %eax
211 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
212 ; X86-NEXT: orl %ecx, %eax
213 ; X86-NEXT: movl %eax, %ecx
214 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
215 ; X86-NEXT: shrl $2, %eax
216 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
217 ; X86-NEXT: leal (%eax,%ecx,4), %eax
218 ; X86-NEXT: movl %eax, %ecx
219 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
220 ; X86-NEXT: shrl %eax
221 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
222 ; X86-NEXT: leal (%eax,%ecx,2), %eax
225 ; X64-LABEL: test_bitreverse_i32:
227 ; X64-NEXT: # kill: def $edi killed $edi def $rdi
228 ; X64-NEXT: bswapl %edi
229 ; X64-NEXT: movl %edi, %eax
230 ; X64-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
231 ; X64-NEXT: shll $4, %eax
232 ; X64-NEXT: shrl $4, %edi
233 ; X64-NEXT: andl $252645135, %edi # imm = 0xF0F0F0F
234 ; X64-NEXT: orl %eax, %edi
235 ; X64-NEXT: movl %edi, %eax
236 ; X64-NEXT: andl $858993459, %eax # imm = 0x33333333
237 ; X64-NEXT: shrl $2, %edi
238 ; X64-NEXT: andl $858993459, %edi # imm = 0x33333333
239 ; X64-NEXT: leal (%rdi,%rax,4), %eax
240 ; X64-NEXT: movl %eax, %ecx
241 ; X64-NEXT: andl $1431655765, %ecx # imm = 0x55555555
242 ; X64-NEXT: shrl %eax
243 ; X64-NEXT: andl $1431655765, %eax # imm = 0x55555555
244 ; X64-NEXT: leal (%rax,%rcx,2), %eax
247 ; X86XOP-LABEL: test_bitreverse_i32:
249 ; X86XOP-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
250 ; X86XOP-NEXT: vpperm {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0, %xmm0
251 ; X86XOP-NEXT: vmovd %xmm0, %eax
254 ; GFNI-LABEL: test_bitreverse_i32:
256 ; GFNI-NEXT: # kill: def $edi killed $edi def $rdi
257 ; GFNI-NEXT: bswapl %edi
258 ; GFNI-NEXT: movl %edi, %eax
259 ; GFNI-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
260 ; GFNI-NEXT: shll $4, %eax
261 ; GFNI-NEXT: shrl $4, %edi
262 ; GFNI-NEXT: andl $252645135, %edi # imm = 0xF0F0F0F
263 ; GFNI-NEXT: orl %eax, %edi
264 ; GFNI-NEXT: movl %edi, %eax
265 ; GFNI-NEXT: andl $858993459, %eax # imm = 0x33333333
266 ; GFNI-NEXT: shrl $2, %edi
267 ; GFNI-NEXT: andl $858993459, %edi # imm = 0x33333333
268 ; GFNI-NEXT: leal (%rdi,%rax,4), %eax
269 ; GFNI-NEXT: movl %eax, %ecx
270 ; GFNI-NEXT: andl $1431655765, %ecx # imm = 0x55555555
271 ; GFNI-NEXT: shrl %eax
272 ; GFNI-NEXT: andl $1431655765, %eax # imm = 0x55555555
273 ; GFNI-NEXT: leal (%rax,%rcx,2), %eax
275 %b = call i32 @llvm.bitreverse.i32(i32 %a)
279 declare i24 @llvm.bitreverse.i24(i24) readnone
281 define i24 @test_bitreverse_i24(i24 %a) nounwind {
282 ; X86-LABEL: test_bitreverse_i24:
284 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
285 ; X86-NEXT: bswapl %eax
286 ; X86-NEXT: movl %eax, %ecx
287 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
288 ; X86-NEXT: shll $4, %ecx
289 ; X86-NEXT: shrl $4, %eax
290 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
291 ; X86-NEXT: orl %ecx, %eax
292 ; X86-NEXT: movl %eax, %ecx
293 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
294 ; X86-NEXT: shrl $2, %eax
295 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
296 ; X86-NEXT: leal (%eax,%ecx,4), %eax
297 ; X86-NEXT: movl %eax, %ecx
298 ; X86-NEXT: andl $1431655680, %ecx # imm = 0x55555500
299 ; X86-NEXT: shrl %eax
300 ; X86-NEXT: andl $1431655680, %eax # imm = 0x55555500
301 ; X86-NEXT: leal (%eax,%ecx,2), %eax
302 ; X86-NEXT: shrl $8, %eax
305 ; X64-LABEL: test_bitreverse_i24:
307 ; X64-NEXT: # kill: def $edi killed $edi def $rdi
308 ; X64-NEXT: bswapl %edi
309 ; X64-NEXT: movl %edi, %eax
310 ; X64-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
311 ; X64-NEXT: shll $4, %eax
312 ; X64-NEXT: shrl $4, %edi
313 ; X64-NEXT: andl $252645135, %edi # imm = 0xF0F0F0F
314 ; X64-NEXT: orl %eax, %edi
315 ; X64-NEXT: movl %edi, %eax
316 ; X64-NEXT: andl $858993459, %eax # imm = 0x33333333
317 ; X64-NEXT: shrl $2, %edi
318 ; X64-NEXT: andl $858993459, %edi # imm = 0x33333333
319 ; X64-NEXT: leal (%rdi,%rax,4), %eax
320 ; X64-NEXT: movl %eax, %ecx
321 ; X64-NEXT: andl $1431655680, %ecx # imm = 0x55555500
322 ; X64-NEXT: shrl %eax
323 ; X64-NEXT: andl $1431655680, %eax # imm = 0x55555500
324 ; X64-NEXT: leal (%rax,%rcx,2), %eax
325 ; X64-NEXT: shrl $8, %eax
328 ; X86XOP-LABEL: test_bitreverse_i24:
330 ; X86XOP-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
331 ; X86XOP-NEXT: vpperm {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0, %xmm0
332 ; X86XOP-NEXT: vmovd %xmm0, %eax
333 ; X86XOP-NEXT: shrl $8, %eax
336 ; GFNI-LABEL: test_bitreverse_i24:
338 ; GFNI-NEXT: # kill: def $edi killed $edi def $rdi
339 ; GFNI-NEXT: bswapl %edi
340 ; GFNI-NEXT: movl %edi, %eax
341 ; GFNI-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
342 ; GFNI-NEXT: shll $4, %eax
343 ; GFNI-NEXT: shrl $4, %edi
344 ; GFNI-NEXT: andl $252645135, %edi # imm = 0xF0F0F0F
345 ; GFNI-NEXT: orl %eax, %edi
346 ; GFNI-NEXT: movl %edi, %eax
347 ; GFNI-NEXT: andl $858993459, %eax # imm = 0x33333333
348 ; GFNI-NEXT: shrl $2, %edi
349 ; GFNI-NEXT: andl $858993459, %edi # imm = 0x33333333
350 ; GFNI-NEXT: leal (%rdi,%rax,4), %eax
351 ; GFNI-NEXT: movl %eax, %ecx
352 ; GFNI-NEXT: andl $1431655680, %ecx # imm = 0x55555500
353 ; GFNI-NEXT: shrl %eax
354 ; GFNI-NEXT: andl $1431655680, %eax # imm = 0x55555500
355 ; GFNI-NEXT: leal (%rax,%rcx,2), %eax
356 ; GFNI-NEXT: shrl $8, %eax
358 %b = call i24 @llvm.bitreverse.i24(i24 %a)
362 declare i16 @llvm.bitreverse.i16(i16) readnone
364 define i16 @test_bitreverse_i16(i16 %a) nounwind {
365 ; X86-LABEL: test_bitreverse_i16:
367 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
368 ; X86-NEXT: rolw $8, %ax
369 ; X86-NEXT: movl %eax, %ecx
370 ; X86-NEXT: andl $3855, %ecx # imm = 0xF0F
371 ; X86-NEXT: shll $4, %ecx
372 ; X86-NEXT: shrl $4, %eax
373 ; X86-NEXT: andl $3855, %eax # imm = 0xF0F
374 ; X86-NEXT: orl %ecx, %eax
375 ; X86-NEXT: movl %eax, %ecx
376 ; X86-NEXT: andl $13107, %ecx # imm = 0x3333
377 ; X86-NEXT: shrl $2, %eax
378 ; X86-NEXT: andl $13107, %eax # imm = 0x3333
379 ; X86-NEXT: leal (%eax,%ecx,4), %eax
380 ; X86-NEXT: movl %eax, %ecx
381 ; X86-NEXT: andl $21845, %ecx # imm = 0x5555
382 ; X86-NEXT: shrl %eax
383 ; X86-NEXT: andl $21845, %eax # imm = 0x5555
384 ; X86-NEXT: leal (%eax,%ecx,2), %eax
385 ; X86-NEXT: # kill: def $ax killed $ax killed $eax
388 ; X64-LABEL: test_bitreverse_i16:
390 ; X64-NEXT: # kill: def $edi killed $edi def $rdi
391 ; X64-NEXT: rolw $8, %di
392 ; X64-NEXT: movl %edi, %eax
393 ; X64-NEXT: andl $3855, %eax # imm = 0xF0F
394 ; X64-NEXT: shll $4, %eax
395 ; X64-NEXT: shrl $4, %edi
396 ; X64-NEXT: andl $3855, %edi # imm = 0xF0F
397 ; X64-NEXT: orl %eax, %edi
398 ; X64-NEXT: movl %edi, %eax
399 ; X64-NEXT: andl $13107, %eax # imm = 0x3333
400 ; X64-NEXT: shrl $2, %edi
401 ; X64-NEXT: andl $13107, %edi # imm = 0x3333
402 ; X64-NEXT: leal (%rdi,%rax,4), %eax
403 ; X64-NEXT: movl %eax, %ecx
404 ; X64-NEXT: andl $21845, %ecx # imm = 0x5555
405 ; X64-NEXT: shrl %eax
406 ; X64-NEXT: andl $21845, %eax # imm = 0x5555
407 ; X64-NEXT: leal (%rax,%rcx,2), %eax
408 ; X64-NEXT: # kill: def $ax killed $ax killed $eax
411 ; X86XOP-LABEL: test_bitreverse_i16:
413 ; X86XOP-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
414 ; X86XOP-NEXT: vpperm {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0, %xmm0
415 ; X86XOP-NEXT: vmovd %xmm0, %eax
416 ; X86XOP-NEXT: # kill: def $ax killed $ax killed $eax
419 ; GFNI-LABEL: test_bitreverse_i16:
421 ; GFNI-NEXT: # kill: def $edi killed $edi def $rdi
422 ; GFNI-NEXT: rolw $8, %di
423 ; GFNI-NEXT: movl %edi, %eax
424 ; GFNI-NEXT: andl $3855, %eax # imm = 0xF0F
425 ; GFNI-NEXT: shll $4, %eax
426 ; GFNI-NEXT: shrl $4, %edi
427 ; GFNI-NEXT: andl $3855, %edi # imm = 0xF0F
428 ; GFNI-NEXT: orl %eax, %edi
429 ; GFNI-NEXT: movl %edi, %eax
430 ; GFNI-NEXT: andl $13107, %eax # imm = 0x3333
431 ; GFNI-NEXT: shrl $2, %edi
432 ; GFNI-NEXT: andl $13107, %edi # imm = 0x3333
433 ; GFNI-NEXT: leal (%rdi,%rax,4), %eax
434 ; GFNI-NEXT: movl %eax, %ecx
435 ; GFNI-NEXT: andl $21845, %ecx # imm = 0x5555
436 ; GFNI-NEXT: shrl %eax
437 ; GFNI-NEXT: andl $21845, %eax # imm = 0x5555
438 ; GFNI-NEXT: leal (%rax,%rcx,2), %eax
439 ; GFNI-NEXT: # kill: def $ax killed $ax killed $eax
441 %b = call i16 @llvm.bitreverse.i16(i16 %a)
445 declare i8 @llvm.bitreverse.i8(i8) readnone
447 define i8 @test_bitreverse_i8(i8 %a) {
448 ; X86-LABEL: test_bitreverse_i8:
450 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
451 ; X86-NEXT: rolb $4, %al
452 ; X86-NEXT: movl %eax, %ecx
453 ; X86-NEXT: andb $51, %cl
454 ; X86-NEXT: shlb $2, %cl
455 ; X86-NEXT: shrb $2, %al
456 ; X86-NEXT: andb $51, %al
457 ; X86-NEXT: orb %cl, %al
458 ; X86-NEXT: movl %eax, %ecx
459 ; X86-NEXT: andb $85, %cl
460 ; X86-NEXT: addb %cl, %cl
461 ; X86-NEXT: shrb %al
462 ; X86-NEXT: andb $85, %al
463 ; X86-NEXT: orb %cl, %al
466 ; X64-LABEL: test_bitreverse_i8:
468 ; X64-NEXT: rolb $4, %dil
469 ; X64-NEXT: movl %edi, %eax
470 ; X64-NEXT: andb $51, %al
471 ; X64-NEXT: shlb $2, %al
472 ; X64-NEXT: shrb $2, %dil
473 ; X64-NEXT: andb $51, %dil
474 ; X64-NEXT: orb %dil, %al
475 ; X64-NEXT: movl %eax, %ecx
476 ; X64-NEXT: andb $85, %cl
477 ; X64-NEXT: addb %cl, %cl
478 ; X64-NEXT: shrb %al
479 ; X64-NEXT: andb $85, %al
480 ; X64-NEXT: orb %cl, %al
483 ; X86XOP-LABEL: test_bitreverse_i8:
485 ; X86XOP-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
486 ; X86XOP-NEXT: vpperm {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0, %xmm0
487 ; X86XOP-NEXT: vmovd %xmm0, %eax
488 ; X86XOP-NEXT: # kill: def $al killed $al killed $eax
491 ; GFNI-LABEL: test_bitreverse_i8:
493 ; GFNI-NEXT: rolb $4, %dil
494 ; GFNI-NEXT: movl %edi, %eax
495 ; GFNI-NEXT: andb $51, %al
496 ; GFNI-NEXT: shlb $2, %al
497 ; GFNI-NEXT: shrb $2, %dil
498 ; GFNI-NEXT: andb $51, %dil
499 ; GFNI-NEXT: orb %dil, %al
500 ; GFNI-NEXT: movl %eax, %ecx
501 ; GFNI-NEXT: andb $85, %cl
502 ; GFNI-NEXT: addb %cl, %cl
503 ; GFNI-NEXT: shrb %al
504 ; GFNI-NEXT: andb $85, %al
505 ; GFNI-NEXT: orb %cl, %al
507 %b = call i8 @llvm.bitreverse.i8(i8 %a)
511 declare i4 @llvm.bitreverse.i4(i4) readnone
513 define i4 @test_bitreverse_i4(i4 %a) {
514 ; X86-LABEL: test_bitreverse_i4:
516 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
517 ; X86-NEXT: movl %ecx, %eax
518 ; X86-NEXT: andb $8, %al
519 ; X86-NEXT: movl %ecx, %edx
520 ; X86-NEXT: addb %cl, %dl
521 ; X86-NEXT: andb $4, %dl
522 ; X86-NEXT: movb %cl, %ah
523 ; X86-NEXT: shlb $3, %ah
524 ; X86-NEXT: andb $8, %ah
525 ; X86-NEXT: orb %dl, %ah
526 ; X86-NEXT: shrb %cl
527 ; X86-NEXT: andb $2, %cl
528 ; X86-NEXT: orb %ah, %cl
529 ; X86-NEXT: shrb $3, %al
530 ; X86-NEXT: orb %cl, %al
533 ; X64-LABEL: test_bitreverse_i4:
535 ; X64-NEXT: # kill: def $edi killed $edi def $rdi
536 ; X64-NEXT: movl %edi, %eax
537 ; X64-NEXT: andb $8, %al
538 ; X64-NEXT: leal (%rdi,%rdi), %ecx
539 ; X64-NEXT: andb $4, %cl
540 ; X64-NEXT: leal (,%rdi,8), %edx
541 ; X64-NEXT: andb $8, %dl
542 ; X64-NEXT: orb %cl, %dl
543 ; X64-NEXT: shrb %dil
544 ; X64-NEXT: andb $2, %dil
545 ; X64-NEXT: orb %dil, %dl
546 ; X64-NEXT: shrb $3, %al
547 ; X64-NEXT: orb %dl, %al
550 ; X86XOP-LABEL: test_bitreverse_i4:
552 ; X86XOP-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
553 ; X86XOP-NEXT: vpperm {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0, %xmm0
554 ; X86XOP-NEXT: vmovd %xmm0, %eax
555 ; X86XOP-NEXT: shrb $4, %al
556 ; X86XOP-NEXT: # kill: def $al killed $al killed $eax
559 ; GFNI-LABEL: test_bitreverse_i4:
561 ; GFNI-NEXT: # kill: def $edi killed $edi def $rdi
562 ; GFNI-NEXT: movl %edi, %eax
563 ; GFNI-NEXT: andb $8, %al
564 ; GFNI-NEXT: leal (%rdi,%rdi), %ecx
565 ; GFNI-NEXT: andb $4, %cl
566 ; GFNI-NEXT: leal (,%rdi,8), %edx
567 ; GFNI-NEXT: andb $8, %dl
568 ; GFNI-NEXT: orb %cl, %dl
569 ; GFNI-NEXT: shrb %dil
570 ; GFNI-NEXT: andb $2, %dil
571 ; GFNI-NEXT: orb %dil, %dl
572 ; GFNI-NEXT: shrb $3, %al
573 ; GFNI-NEXT: orb %dl, %al
575 %b = call i4 @llvm.bitreverse.i4(i4 %a)
579 ; These tests check that bitreverse(constant) calls are folded
581 define <2 x i16> @fold_v2i16() {
582 ; X86-LABEL: fold_v2i16:
584 ; X86-NEXT: movw $-4096, %ax # imm = 0xF000
585 ; X86-NEXT: movw $240, %dx
588 ; X64-LABEL: fold_v2i16:
590 ; X64-NEXT: movaps {{.*#+}} xmm0 = <61440,240,u,u,u,u,u,u>
593 ; X86XOP-LABEL: fold_v2i16:
595 ; X86XOP-NEXT: vbroadcastss {{.*#+}} xmm0 = [61440,240,61440,240,61440,240,61440,240]
598 ; GFNI-LABEL: fold_v2i16:
600 ; GFNI-NEXT: vbroadcastss {{.*#+}} xmm0 = [61440,240,61440,240,61440,240,61440,240]
602 %b = call <2 x i16> @llvm.bitreverse.v2i16(<2 x i16> <i16 15, i16 3840>)
606 define i24 @fold_i24() {
607 ; CHECK-LABEL: fold_i24:
609 ; CHECK-NEXT: movl $2048, %eax # imm = 0x800
610 ; CHECK-NEXT: ret{{[l|q]}}
611 %b = call i24 @llvm.bitreverse.i24(i24 4096)
615 define i8 @fold_i8() {
616 ; CHECK-LABEL: fold_i8:
618 ; CHECK-NEXT: movb $-16, %al
619 ; CHECK-NEXT: ret{{[l|q]}}
620 %b = call i8 @llvm.bitreverse.i8(i8 15)
624 define i4 @fold_i4() {
625 ; CHECK-LABEL: fold_i4:
627 ; CHECK-NEXT: movb $1, %al
628 ; CHECK-NEXT: ret{{[l|q]}}
629 %b = call i4 @llvm.bitreverse.i4(i4 8)
633 ; These tests check that bitreverse(bitreverse()) calls are removed
635 define i8 @identity_i8(i8 %a) {
636 ; X86-LABEL: identity_i8:
638 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
641 ; X64-LABEL: identity_i8:
643 ; X64-NEXT: movl %edi, %eax
644 ; X64-NEXT: # kill: def $al killed $al killed $eax
647 ; X86XOP-LABEL: identity_i8:
649 ; X86XOP-NEXT: movzbl {{[0-9]+}}(%esp), %eax
652 ; GFNI-LABEL: identity_i8:
654 ; GFNI-NEXT: movl %edi, %eax
655 ; GFNI-NEXT: # kill: def $al killed $al killed $eax
657 %b = call i8 @llvm.bitreverse.i8(i8 %a)
658 %c = call i8 @llvm.bitreverse.i8(i8 %b)
662 define <2 x i16> @identity_v2i16(<2 x i16> %a) {
663 ; X86-LABEL: identity_v2i16:
665 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
666 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %edx
669 ; X64-LABEL: identity_v2i16:
673 ; X86XOP-LABEL: identity_v2i16:
677 ; GFNI-LABEL: identity_v2i16:
680 %b = call <2 x i16> @llvm.bitreverse.v2i16(<2 x i16> %a)
681 %c = call <2 x i16> @llvm.bitreverse.v2i16(<2 x i16> %b)
685 ; These tests check that bitreverse(undef) calls are removed
687 define i8 @undef_i8() {
688 ; CHECK-LABEL: undef_i8:
690 ; CHECK-NEXT: ret{{[l|q]}}
691 %b = call i8 @llvm.bitreverse.i8(i8 undef)
695 define <2 x i16> @undef_v2i16() {
696 ; CHECK-LABEL: undef_v2i16:
698 ; CHECK-NEXT: ret{{[l|q]}}
699 %b = call <2 x i16> @llvm.bitreverse.v2i16(<2 x i16> undef)
703 ; Make sure we don't assert during type legalization promoting a large
704 ; bitreverse due to the need for a large shift that won't fit in the i8 returned
705 ; from getShiftAmountTy.
706 define i528 @large_promotion(i528 %A) nounwind {
707 ; X86-LABEL: large_promotion:
709 ; X86-NEXT: pushl %ebp
710 ; X86-NEXT: pushl %ebx
711 ; X86-NEXT: pushl %edi
712 ; X86-NEXT: pushl %esi
713 ; X86-NEXT: subl $60, %esp
714 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
715 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
716 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
717 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
718 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
719 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx
720 ; X86-NEXT: bswapl %ebx
721 ; X86-NEXT: movl %ebx, %ebp
722 ; X86-NEXT: andl $252645135, %ebp # imm = 0xF0F0F0F
723 ; X86-NEXT: shll $4, %ebp
724 ; X86-NEXT: shrl $4, %ebx
725 ; X86-NEXT: andl $252645135, %ebx # imm = 0xF0F0F0F
726 ; X86-NEXT: orl %ebp, %ebx
727 ; X86-NEXT: movl %ebx, %ebp
728 ; X86-NEXT: andl $858993459, %ebp # imm = 0x33333333
729 ; X86-NEXT: shrl $2, %ebx
730 ; X86-NEXT: andl $858993459, %ebx # imm = 0x33333333
731 ; X86-NEXT: leal (%ebx,%ebp,4), %ebx
732 ; X86-NEXT: movl %ebx, %ebp
733 ; X86-NEXT: andl $1431633920, %ebp # imm = 0x55550000
734 ; X86-NEXT: shrl %ebx
735 ; X86-NEXT: andl $1431633920, %ebx # imm = 0x55550000
736 ; X86-NEXT: leal (%ebx,%ebp,2), %ebp
737 ; X86-NEXT: bswapl %edi
738 ; X86-NEXT: movl %edi, %ebx
739 ; X86-NEXT: andl $252645135, %ebx # imm = 0xF0F0F0F
740 ; X86-NEXT: shll $4, %ebx
741 ; X86-NEXT: shrl $4, %edi
742 ; X86-NEXT: andl $252645135, %edi # imm = 0xF0F0F0F
743 ; X86-NEXT: orl %ebx, %edi
744 ; X86-NEXT: movl %edi, %ebx
745 ; X86-NEXT: andl $858993459, %ebx # imm = 0x33333333
746 ; X86-NEXT: shrl $2, %edi
747 ; X86-NEXT: andl $858993459, %edi # imm = 0x33333333
748 ; X86-NEXT: leal (%edi,%ebx,4), %edi
749 ; X86-NEXT: movl %edi, %ebx
750 ; X86-NEXT: andl $1431655765, %ebx # imm = 0x55555555
751 ; X86-NEXT: shrl %edi
752 ; X86-NEXT: andl $1431655765, %edi # imm = 0x55555555
753 ; X86-NEXT: leal (%edi,%ebx,2), %edi
754 ; X86-NEXT: movl %edi, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
755 ; X86-NEXT: bswapl %esi
756 ; X86-NEXT: movl %esi, %edi
757 ; X86-NEXT: andl $252645135, %edi # imm = 0xF0F0F0F
758 ; X86-NEXT: shll $4, %edi
759 ; X86-NEXT: shrl $4, %esi
760 ; X86-NEXT: andl $252645135, %esi # imm = 0xF0F0F0F
761 ; X86-NEXT: orl %edi, %esi
762 ; X86-NEXT: movl %esi, %edi
763 ; X86-NEXT: andl $858993459, %edi # imm = 0x33333333
764 ; X86-NEXT: shrl $2, %esi
765 ; X86-NEXT: andl $858993459, %esi # imm = 0x33333333
766 ; X86-NEXT: leal (%esi,%edi,4), %esi
767 ; X86-NEXT: movl %esi, %edi
768 ; X86-NEXT: andl $1431655765, %edi # imm = 0x55555555
769 ; X86-NEXT: shrl %esi
770 ; X86-NEXT: andl $1431655765, %esi # imm = 0x55555555
771 ; X86-NEXT: leal (%esi,%edi,2), %ebx
772 ; X86-NEXT: bswapl %edx
773 ; X86-NEXT: movl %edx, %esi
774 ; X86-NEXT: andl $252645135, %esi # imm = 0xF0F0F0F
775 ; X86-NEXT: shll $4, %esi
776 ; X86-NEXT: shrl $4, %edx
777 ; X86-NEXT: andl $252645135, %edx # imm = 0xF0F0F0F
778 ; X86-NEXT: orl %esi, %edx
779 ; X86-NEXT: movl %edx, %esi
780 ; X86-NEXT: andl $858993459, %esi # imm = 0x33333333
781 ; X86-NEXT: shrl $2, %edx
782 ; X86-NEXT: andl $858993459, %edx # imm = 0x33333333
783 ; X86-NEXT: leal (%edx,%esi,4), %edx
784 ; X86-NEXT: movl %edx, %esi
785 ; X86-NEXT: andl $1431655765, %esi # imm = 0x55555555
786 ; X86-NEXT: shrl %edx
787 ; X86-NEXT: andl $1431655765, %edx # imm = 0x55555555
788 ; X86-NEXT: leal (%edx,%esi,2), %edx
789 ; X86-NEXT: movl %edx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
790 ; X86-NEXT: bswapl %ecx
791 ; X86-NEXT: movl %ecx, %edx
792 ; X86-NEXT: andl $252645135, %edx # imm = 0xF0F0F0F
793 ; X86-NEXT: shll $4, %edx
794 ; X86-NEXT: shrl $4, %ecx
795 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
796 ; X86-NEXT: orl %edx, %ecx
797 ; X86-NEXT: movl %ecx, %edx
798 ; X86-NEXT: andl $858993459, %edx # imm = 0x33333333
799 ; X86-NEXT: shrl $2, %ecx
800 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
801 ; X86-NEXT: leal (%ecx,%edx,4), %ecx
802 ; X86-NEXT: movl %ecx, %edx
803 ; X86-NEXT: andl $1431655765, %edx # imm = 0x55555555
804 ; X86-NEXT: shrl %ecx
805 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
806 ; X86-NEXT: leal (%ecx,%edx,2), %ecx
807 ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
808 ; X86-NEXT: bswapl %eax
809 ; X86-NEXT: movl %eax, %ecx
810 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
811 ; X86-NEXT: shll $4, %ecx
812 ; X86-NEXT: shrl $4, %eax
813 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
814 ; X86-NEXT: orl %ecx, %eax
815 ; X86-NEXT: movl %eax, %ecx
816 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
817 ; X86-NEXT: shrl $2, %eax
818 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
819 ; X86-NEXT: leal (%eax,%ecx,4), %eax
820 ; X86-NEXT: movl %eax, %ecx
821 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
822 ; X86-NEXT: shrl %eax
823 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
824 ; X86-NEXT: leal (%eax,%ecx,2), %eax
825 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
826 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
827 ; X86-NEXT: bswapl %eax
828 ; X86-NEXT: movl %eax, %ecx
829 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
830 ; X86-NEXT: shll $4, %ecx
831 ; X86-NEXT: shrl $4, %eax
832 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
833 ; X86-NEXT: orl %ecx, %eax
834 ; X86-NEXT: movl %eax, %ecx
835 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
836 ; X86-NEXT: shrl $2, %eax
837 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
838 ; X86-NEXT: leal (%eax,%ecx,4), %eax
839 ; X86-NEXT: movl %eax, %ecx
840 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
841 ; X86-NEXT: shrl %eax
842 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
843 ; X86-NEXT: leal (%eax,%ecx,2), %eax
844 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
845 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
846 ; X86-NEXT: bswapl %eax
847 ; X86-NEXT: movl %eax, %ecx
848 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
849 ; X86-NEXT: shll $4, %ecx
850 ; X86-NEXT: shrl $4, %eax
851 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
852 ; X86-NEXT: orl %ecx, %eax
853 ; X86-NEXT: movl %eax, %ecx
854 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
855 ; X86-NEXT: shrl $2, %eax
856 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
857 ; X86-NEXT: leal (%eax,%ecx,4), %eax
858 ; X86-NEXT: movl %eax, %ecx
859 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
860 ; X86-NEXT: shrl %eax
861 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
862 ; X86-NEXT: leal (%eax,%ecx,2), %eax
863 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
864 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
865 ; X86-NEXT: bswapl %eax
866 ; X86-NEXT: movl %eax, %ecx
867 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
868 ; X86-NEXT: shll $4, %ecx
869 ; X86-NEXT: shrl $4, %eax
870 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
871 ; X86-NEXT: orl %ecx, %eax
872 ; X86-NEXT: movl %eax, %ecx
873 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
874 ; X86-NEXT: shrl $2, %eax
875 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
876 ; X86-NEXT: leal (%eax,%ecx,4), %eax
877 ; X86-NEXT: movl %eax, %ecx
878 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
879 ; X86-NEXT: shrl %eax
880 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
881 ; X86-NEXT: leal (%eax,%ecx,2), %eax
882 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
883 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
884 ; X86-NEXT: bswapl %eax
885 ; X86-NEXT: movl %eax, %ecx
886 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
887 ; X86-NEXT: shll $4, %ecx
888 ; X86-NEXT: shrl $4, %eax
889 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
890 ; X86-NEXT: orl %ecx, %eax
891 ; X86-NEXT: movl %eax, %ecx
892 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
893 ; X86-NEXT: shrl $2, %eax
894 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
895 ; X86-NEXT: leal (%eax,%ecx,4), %eax
896 ; X86-NEXT: movl %eax, %ecx
897 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
898 ; X86-NEXT: shrl %eax
899 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
900 ; X86-NEXT: leal (%eax,%ecx,2), %edi
901 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
902 ; X86-NEXT: bswapl %eax
903 ; X86-NEXT: movl %eax, %ecx
904 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
905 ; X86-NEXT: shll $4, %ecx
906 ; X86-NEXT: shrl $4, %eax
907 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
908 ; X86-NEXT: orl %ecx, %eax
909 ; X86-NEXT: movl %eax, %ecx
910 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
911 ; X86-NEXT: shrl $2, %eax
912 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
913 ; X86-NEXT: leal (%eax,%ecx,4), %eax
914 ; X86-NEXT: movl %eax, %ecx
915 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
916 ; X86-NEXT: shrl %eax
917 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
918 ; X86-NEXT: leal (%eax,%ecx,2), %eax
919 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
920 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
921 ; X86-NEXT: bswapl %eax
922 ; X86-NEXT: movl %eax, %ecx
923 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
924 ; X86-NEXT: shll $4, %ecx
925 ; X86-NEXT: shrl $4, %eax
926 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
927 ; X86-NEXT: orl %ecx, %eax
928 ; X86-NEXT: movl %eax, %ecx
929 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
930 ; X86-NEXT: shrl $2, %eax
931 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
932 ; X86-NEXT: leal (%eax,%ecx,4), %eax
933 ; X86-NEXT: movl %eax, %ecx
934 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
935 ; X86-NEXT: shrl %eax
936 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
937 ; X86-NEXT: leal (%eax,%ecx,2), %eax
938 ; X86-NEXT: movl %eax, (%esp) # 4-byte Spill
939 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
940 ; X86-NEXT: bswapl %eax
941 ; X86-NEXT: movl %eax, %ecx
942 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
943 ; X86-NEXT: shll $4, %ecx
944 ; X86-NEXT: shrl $4, %eax
945 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
946 ; X86-NEXT: orl %ecx, %eax
947 ; X86-NEXT: movl %eax, %ecx
948 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
949 ; X86-NEXT: shrl $2, %eax
950 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
951 ; X86-NEXT: leal (%eax,%ecx,4), %eax
952 ; X86-NEXT: movl %eax, %ecx
953 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
954 ; X86-NEXT: shrl %eax
955 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
956 ; X86-NEXT: leal (%eax,%ecx,2), %eax
957 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
958 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
959 ; X86-NEXT: bswapl %eax
960 ; X86-NEXT: movl %eax, %ecx
961 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
962 ; X86-NEXT: shll $4, %ecx
963 ; X86-NEXT: shrl $4, %eax
964 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
965 ; X86-NEXT: orl %ecx, %eax
966 ; X86-NEXT: movl %eax, %ecx
967 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
968 ; X86-NEXT: shrl $2, %eax
969 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
970 ; X86-NEXT: leal (%eax,%ecx,4), %eax
971 ; X86-NEXT: movl %eax, %ecx
972 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
973 ; X86-NEXT: shrl %eax
974 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
975 ; X86-NEXT: leal (%eax,%ecx,2), %eax
976 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
977 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
978 ; X86-NEXT: bswapl %eax
979 ; X86-NEXT: movl %eax, %ecx
980 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
981 ; X86-NEXT: shll $4, %ecx
982 ; X86-NEXT: shrl $4, %eax
983 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
984 ; X86-NEXT: orl %ecx, %eax
985 ; X86-NEXT: movl %eax, %ecx
986 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
987 ; X86-NEXT: shrl $2, %eax
988 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
989 ; X86-NEXT: leal (%eax,%ecx,4), %eax
990 ; X86-NEXT: movl %eax, %ecx
991 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
992 ; X86-NEXT: shrl %eax
993 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
994 ; X86-NEXT: leal (%eax,%ecx,2), %eax
995 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
996 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
997 ; X86-NEXT: bswapl %eax
998 ; X86-NEXT: movl %eax, %ecx
999 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
1000 ; X86-NEXT: shll $4, %ecx
1001 ; X86-NEXT: shrl $4, %eax
1002 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
1003 ; X86-NEXT: orl %ecx, %eax
1004 ; X86-NEXT: movl %eax, %ecx
1005 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
1006 ; X86-NEXT: shrl $2, %eax
1007 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
1008 ; X86-NEXT: leal (%eax,%ecx,4), %eax
1009 ; X86-NEXT: movl %eax, %ecx
1010 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
1011 ; X86-NEXT: shrl %eax
1012 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
1013 ; X86-NEXT: leal (%eax,%ecx,2), %eax
1014 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1015 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
1016 ; X86-NEXT: bswapl %eax
1017 ; X86-NEXT: movl %eax, %ecx
1018 ; X86-NEXT: andl $252645135, %ecx # imm = 0xF0F0F0F
1019 ; X86-NEXT: shll $4, %ecx
1020 ; X86-NEXT: shrl $4, %eax
1021 ; X86-NEXT: andl $252645135, %eax # imm = 0xF0F0F0F
1022 ; X86-NEXT: orl %ecx, %eax
1023 ; X86-NEXT: movl %eax, %ecx
1024 ; X86-NEXT: andl $858993459, %ecx # imm = 0x33333333
1025 ; X86-NEXT: shrl $2, %eax
1026 ; X86-NEXT: andl $858993459, %eax # imm = 0x33333333
1027 ; X86-NEXT: leal (%eax,%ecx,4), %eax
1028 ; X86-NEXT: movl %eax, %ecx
1029 ; X86-NEXT: andl $1431655765, %ecx # imm = 0x55555555
1030 ; X86-NEXT: shrl %eax
1031 ; X86-NEXT: andl $1431655765, %eax # imm = 0x55555555
1032 ; X86-NEXT: leal (%eax,%ecx,2), %edx
1033 ; X86-NEXT: movl %ebp, %esi
1034 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1035 ; X86-NEXT: shrdl $16, %ecx, %esi
1036 ; X86-NEXT: movl %ebx, %eax
1037 ; X86-NEXT: shrdl $16, %ebx, %ecx
1038 ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1039 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1040 ; X86-NEXT: shrdl $16, %ecx, %eax
1041 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1042 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
1043 ; X86-NEXT: shrdl $16, %eax, %ecx
1044 ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1045 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1046 ; X86-NEXT: shrdl $16, %ecx, %eax
1047 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1048 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
1049 ; X86-NEXT: shrdl $16, %eax, %ecx
1050 ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1051 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1052 ; X86-NEXT: shrdl $16, %ecx, %eax
1053 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1054 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
1055 ; X86-NEXT: shrdl $16, %eax, %ecx
1056 ; X86-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1057 ; X86-NEXT: shrdl $16, %edi, %eax
1058 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1059 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %eax # 4-byte Reload
1060 ; X86-NEXT: shrdl $16, %eax, %edi
1061 ; X86-NEXT: movl %edi, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1062 ; X86-NEXT: movl (%esp), %ecx # 4-byte Reload
1063 ; X86-NEXT: shrdl $16, %ecx, %eax
1064 ; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1065 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ebp # 4-byte Reload
1066 ; X86-NEXT: shrdl $16, %ebp, %ecx
1067 ; X86-NEXT: movl %ecx, (%esp) # 4-byte Spill
1068 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ebx # 4-byte Reload
1069 ; X86-NEXT: shrdl $16, %ebx, %ebp
1070 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %edi # 4-byte Reload
1071 ; X86-NEXT: shrdl $16, %edi, %ebx
1072 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1073 ; X86-NEXT: shrdl $16, %ecx, %edi
1074 ; X86-NEXT: shrdl $16, %edx, %ecx
1075 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
1076 ; X86-NEXT: movl %ecx, 60(%eax)
1077 ; X86-NEXT: movl %edi, 56(%eax)
1078 ; X86-NEXT: movl %ebx, 52(%eax)
1079 ; X86-NEXT: movl %ebp, 48(%eax)
1080 ; X86-NEXT: movl (%esp), %ecx # 4-byte Reload
1081 ; X86-NEXT: movl %ecx, 44(%eax)
1082 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1083 ; X86-NEXT: movl %ecx, 40(%eax)
1084 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1085 ; X86-NEXT: movl %ecx, 36(%eax)
1086 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1087 ; X86-NEXT: movl %ecx, 32(%eax)
1088 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1089 ; X86-NEXT: movl %ecx, 28(%eax)
1090 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1091 ; X86-NEXT: movl %ecx, 24(%eax)
1092 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1093 ; X86-NEXT: movl %ecx, 20(%eax)
1094 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1095 ; X86-NEXT: movl %ecx, 16(%eax)
1096 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1097 ; X86-NEXT: movl %ecx, 12(%eax)
1098 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1099 ; X86-NEXT: movl %ecx, 8(%eax)
1100 ; X86-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1101 ; X86-NEXT: movl %ecx, 4(%eax)
1102 ; X86-NEXT: movl %esi, (%eax)
1103 ; X86-NEXT: shrl $16, %edx
1104 ; X86-NEXT: movw %dx, 64(%eax)
1105 ; X86-NEXT: addl $60, %esp
1106 ; X86-NEXT: popl %esi
1107 ; X86-NEXT: popl %edi
1108 ; X86-NEXT: popl %ebx
1109 ; X86-NEXT: popl %ebp
1112 ; X64-LABEL: large_promotion:
1114 ; X64-NEXT: pushq %r15
1115 ; X64-NEXT: pushq %r14
1116 ; X64-NEXT: pushq %r13
1117 ; X64-NEXT: pushq %r12
1118 ; X64-NEXT: pushq %rbx
1119 ; X64-NEXT: movq %rdi, %rax
1120 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %r12
1121 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %r15
1122 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rbx
1123 ; X64-NEXT: movq {{[0-9]+}}(%rsp), %rdi
1124 ; X64-NEXT: bswapq %rdi
1125 ; X64-NEXT: movq %rdi, %r10
1126 ; X64-NEXT: shrq $4, %r10
1127 ; X64-NEXT: movabsq $1085102592571150095, %r11 # imm = 0xF0F0F0F0F0F0F0F
1128 ; X64-NEXT: andq %r11, %r10
1129 ; X64-NEXT: andq %r11, %rdi
1130 ; X64-NEXT: shlq $4, %rdi
1131 ; X64-NEXT: orq %r10, %rdi
1132 ; X64-NEXT: movabsq $3689348814741910323, %r10 # imm = 0x3333333333333333
1133 ; X64-NEXT: movq %rdi, %r14
1134 ; X64-NEXT: andq %r10, %r14
1135 ; X64-NEXT: shrq $2, %rdi
1136 ; X64-NEXT: andq %r10, %rdi
1137 ; X64-NEXT: leaq (%rdi,%r14,4), %rdi
1138 ; X64-NEXT: movabsq $6148820866244280320, %r14 # imm = 0x5555000000000000
1139 ; X64-NEXT: movq %rdi, %r13
1140 ; X64-NEXT: andq %r14, %r13
1141 ; X64-NEXT: shrq %rdi
1142 ; X64-NEXT: andq %r14, %rdi
1143 ; X64-NEXT: leaq (%rdi,%r13,2), %rdi
1144 ; X64-NEXT: bswapq %rbx
1145 ; X64-NEXT: movq %rbx, %r14
1146 ; X64-NEXT: shrq $4, %r14
1147 ; X64-NEXT: andq %r11, %r14
1148 ; X64-NEXT: andq %r11, %rbx
1149 ; X64-NEXT: shlq $4, %rbx
1150 ; X64-NEXT: orq %r14, %rbx
1151 ; X64-NEXT: movq %rbx, %r14
1152 ; X64-NEXT: andq %r10, %r14
1153 ; X64-NEXT: shrq $2, %rbx
1154 ; X64-NEXT: andq %r10, %rbx
1155 ; X64-NEXT: leaq (%rbx,%r14,4), %rbx
1156 ; X64-NEXT: movabsq $6148914691236517205, %r14 # imm = 0x5555555555555555
1157 ; X64-NEXT: movq %rbx, %r13
1158 ; X64-NEXT: andq %r14, %r13
1159 ; X64-NEXT: shrq %rbx
1160 ; X64-NEXT: andq %r14, %rbx
1161 ; X64-NEXT: leaq (%rbx,%r13,2), %rbx
1162 ; X64-NEXT: shrdq $48, %rbx, %rdi
1163 ; X64-NEXT: bswapq %r15
1164 ; X64-NEXT: movq %r15, %r13
1165 ; X64-NEXT: shrq $4, %r13
1166 ; X64-NEXT: andq %r11, %r13
1167 ; X64-NEXT: andq %r11, %r15
1168 ; X64-NEXT: shlq $4, %r15
1169 ; X64-NEXT: orq %r13, %r15
1170 ; X64-NEXT: movq %r15, %r13
1171 ; X64-NEXT: andq %r10, %r13
1172 ; X64-NEXT: shrq $2, %r15
1173 ; X64-NEXT: andq %r10, %r15
1174 ; X64-NEXT: leaq (%r15,%r13,4), %r15
1175 ; X64-NEXT: movq %r15, %r13
1176 ; X64-NEXT: andq %r14, %r13
1177 ; X64-NEXT: shrq %r15
1178 ; X64-NEXT: andq %r14, %r15
1179 ; X64-NEXT: leaq (%r15,%r13,2), %r15
1180 ; X64-NEXT: shrdq $48, %r15, %rbx
1181 ; X64-NEXT: bswapq %r12
1182 ; X64-NEXT: movq %r12, %r13
1183 ; X64-NEXT: shrq $4, %r13
1184 ; X64-NEXT: andq %r11, %r13
1185 ; X64-NEXT: andq %r11, %r12
1186 ; X64-NEXT: shlq $4, %r12
1187 ; X64-NEXT: orq %r13, %r12
1188 ; X64-NEXT: movq %r12, %r13
1189 ; X64-NEXT: andq %r10, %r13
1190 ; X64-NEXT: shrq $2, %r12
1191 ; X64-NEXT: andq %r10, %r12
1192 ; X64-NEXT: leaq (%r12,%r13,4), %r12
1193 ; X64-NEXT: movq %r12, %r13
1194 ; X64-NEXT: andq %r14, %r13
1195 ; X64-NEXT: shrq %r12
1196 ; X64-NEXT: andq %r14, %r12
1197 ; X64-NEXT: leaq (%r12,%r13,2), %r12
1198 ; X64-NEXT: shrdq $48, %r12, %r15
1199 ; X64-NEXT: bswapq %r9
1200 ; X64-NEXT: movq %r9, %r13
1201 ; X64-NEXT: shrq $4, %r13
1202 ; X64-NEXT: andq %r11, %r13
1203 ; X64-NEXT: andq %r11, %r9
1204 ; X64-NEXT: shlq $4, %r9
1205 ; X64-NEXT: orq %r13, %r9
1206 ; X64-NEXT: movq %r9, %r13
1207 ; X64-NEXT: andq %r10, %r13
1208 ; X64-NEXT: shrq $2, %r9
1209 ; X64-NEXT: andq %r10, %r9
1210 ; X64-NEXT: leaq (%r9,%r13,4), %r9
1211 ; X64-NEXT: movq %r9, %r13
1212 ; X64-NEXT: andq %r14, %r13
1213 ; X64-NEXT: shrq %r9
1214 ; X64-NEXT: andq %r14, %r9
1215 ; X64-NEXT: leaq (%r9,%r13,2), %r9
1216 ; X64-NEXT: shrdq $48, %r9, %r12
1217 ; X64-NEXT: bswapq %r8
1218 ; X64-NEXT: movq %r8, %r13
1219 ; X64-NEXT: shrq $4, %r13
1220 ; X64-NEXT: andq %r11, %r13
1221 ; X64-NEXT: andq %r11, %r8
1222 ; X64-NEXT: shlq $4, %r8
1223 ; X64-NEXT: orq %r13, %r8
1224 ; X64-NEXT: movq %r8, %r13
1225 ; X64-NEXT: andq %r10, %r13
1226 ; X64-NEXT: shrq $2, %r8
1227 ; X64-NEXT: andq %r10, %r8
1228 ; X64-NEXT: leaq (%r8,%r13,4), %r8
1229 ; X64-NEXT: movq %r8, %r13
1230 ; X64-NEXT: andq %r14, %r13
1231 ; X64-NEXT: shrq %r8
1232 ; X64-NEXT: andq %r14, %r8
1233 ; X64-NEXT: leaq (%r8,%r13,2), %r8
1234 ; X64-NEXT: shrdq $48, %r8, %r9
1235 ; X64-NEXT: bswapq %rcx
1236 ; X64-NEXT: movq %rcx, %r13
1237 ; X64-NEXT: shrq $4, %r13
1238 ; X64-NEXT: andq %r11, %r13
1239 ; X64-NEXT: andq %r11, %rcx
1240 ; X64-NEXT: shlq $4, %rcx
1241 ; X64-NEXT: orq %r13, %rcx
1242 ; X64-NEXT: movq %rcx, %r13
1243 ; X64-NEXT: andq %r10, %r13
1244 ; X64-NEXT: shrq $2, %rcx
1245 ; X64-NEXT: andq %r10, %rcx
1246 ; X64-NEXT: leaq (%rcx,%r13,4), %rcx
1247 ; X64-NEXT: movq %rcx, %r13
1248 ; X64-NEXT: andq %r14, %r13
1249 ; X64-NEXT: shrq %rcx
1250 ; X64-NEXT: andq %r14, %rcx
1251 ; X64-NEXT: leaq (%rcx,%r13,2), %rcx
1252 ; X64-NEXT: shrdq $48, %rcx, %r8
1253 ; X64-NEXT: bswapq %rdx
1254 ; X64-NEXT: movq %rdx, %r13
1255 ; X64-NEXT: shrq $4, %r13
1256 ; X64-NEXT: andq %r11, %r13
1257 ; X64-NEXT: andq %r11, %rdx
1258 ; X64-NEXT: shlq $4, %rdx
1259 ; X64-NEXT: orq %r13, %rdx
1260 ; X64-NEXT: movq %rdx, %r13
1261 ; X64-NEXT: andq %r10, %r13
1262 ; X64-NEXT: shrq $2, %rdx
1263 ; X64-NEXT: andq %r10, %rdx
1264 ; X64-NEXT: leaq (%rdx,%r13,4), %rdx
1265 ; X64-NEXT: movq %rdx, %r13
1266 ; X64-NEXT: andq %r14, %r13
1267 ; X64-NEXT: shrq %rdx
1268 ; X64-NEXT: andq %r14, %rdx
1269 ; X64-NEXT: leaq (%rdx,%r13,2), %rdx
1270 ; X64-NEXT: shrdq $48, %rdx, %rcx
1271 ; X64-NEXT: bswapq %rsi
1272 ; X64-NEXT: movq %rsi, %r13
1273 ; X64-NEXT: shrq $4, %r13
1274 ; X64-NEXT: andq %r11, %r13
1275 ; X64-NEXT: andq %r11, %rsi
1276 ; X64-NEXT: shlq $4, %rsi
1277 ; X64-NEXT: orq %r13, %rsi
1278 ; X64-NEXT: movq %rsi, %r11
1279 ; X64-NEXT: andq %r10, %r11
1280 ; X64-NEXT: shrq $2, %rsi
1281 ; X64-NEXT: andq %r10, %rsi
1282 ; X64-NEXT: leaq (%rsi,%r11,4), %rsi
1283 ; X64-NEXT: movq %rsi, %r10
1284 ; X64-NEXT: andq %r14, %r10
1285 ; X64-NEXT: shrq %rsi
1286 ; X64-NEXT: andq %r14, %rsi
1287 ; X64-NEXT: leaq (%rsi,%r10,2), %rsi
1288 ; X64-NEXT: shrdq $48, %rsi, %rdx
1289 ; X64-NEXT: shrq $48, %rsi
1290 ; X64-NEXT: movq %rdx, 56(%rax)
1291 ; X64-NEXT: movq %rcx, 48(%rax)
1292 ; X64-NEXT: movq %r8, 40(%rax)
1293 ; X64-NEXT: movq %r9, 32(%rax)
1294 ; X64-NEXT: movq %r12, 24(%rax)
1295 ; X64-NEXT: movq %r15, 16(%rax)
1296 ; X64-NEXT: movq %rbx, 8(%rax)
1297 ; X64-NEXT: movq %rdi, (%rax)
1298 ; X64-NEXT: movw %si, 64(%rax)
1299 ; X64-NEXT: popq %rbx
1300 ; X64-NEXT: popq %r12
1301 ; X64-NEXT: popq %r13
1302 ; X64-NEXT: popq %r14
1303 ; X64-NEXT: popq %r15
1306 ; X86XOP-LABEL: large_promotion:
1308 ; X86XOP-NEXT: pushl %ebp
1309 ; X86XOP-NEXT: pushl %ebx
1310 ; X86XOP-NEXT: pushl %edi
1311 ; X86XOP-NEXT: pushl %esi
1312 ; X86XOP-NEXT: subl $44, %esp
1313 ; X86XOP-NEXT: vmovdqa {{.*#+}} xmm0 = [87,86,85,84,83,82,81,80,95,94,93,92,91,90,89,88]
1314 ; X86XOP-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
1315 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1316 ; X86XOP-NEXT: vpextrd $1, %xmm1, %eax
1317 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1318 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1319 ; X86XOP-NEXT: vmovd %xmm1, %ecx
1320 ; X86XOP-NEXT: shrdl $16, %ecx, %eax
1321 ; X86XOP-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1322 ; X86XOP-NEXT: vpextrd $1, %xmm1, %eax
1323 ; X86XOP-NEXT: shrdl $16, %eax, %ecx
1324 ; X86XOP-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1325 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1326 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1327 ; X86XOP-NEXT: vmovd %xmm1, %ecx
1328 ; X86XOP-NEXT: shrdl $16, %ecx, %eax
1329 ; X86XOP-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1330 ; X86XOP-NEXT: vpextrd $1, %xmm1, %eax
1331 ; X86XOP-NEXT: shrdl $16, %eax, %ecx
1332 ; X86XOP-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1333 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1334 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1335 ; X86XOP-NEXT: vmovd %xmm1, %ecx
1336 ; X86XOP-NEXT: shrdl $16, %ecx, %eax
1337 ; X86XOP-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1338 ; X86XOP-NEXT: vpextrd $1, %xmm1, %eax
1339 ; X86XOP-NEXT: shrdl $16, %eax, %ecx
1340 ; X86XOP-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1341 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1342 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1343 ; X86XOP-NEXT: vmovd %xmm1, %ecx
1344 ; X86XOP-NEXT: shrdl $16, %ecx, %eax
1345 ; X86XOP-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1346 ; X86XOP-NEXT: vpextrd $1, %xmm1, %eax
1347 ; X86XOP-NEXT: shrdl $16, %eax, %ecx
1348 ; X86XOP-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1349 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1350 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1351 ; X86XOP-NEXT: vmovd %xmm1, %ecx
1352 ; X86XOP-NEXT: shrdl $16, %ecx, %eax
1353 ; X86XOP-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1354 ; X86XOP-NEXT: vpextrd $1, %xmm1, %eax
1355 ; X86XOP-NEXT: shrdl $16, %eax, %ecx
1356 ; X86XOP-NEXT: movl %ecx, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
1357 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1358 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1359 ; X86XOP-NEXT: vmovd %xmm1, %ebp
1360 ; X86XOP-NEXT: shrdl $16, %ebp, %eax
1361 ; X86XOP-NEXT: movl %eax, (%esp) # 4-byte Spill
1362 ; X86XOP-NEXT: vpextrd $1, %xmm1, %ebx
1363 ; X86XOP-NEXT: shrdl $16, %ebx, %ebp
1364 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1365 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm1
1366 ; X86XOP-NEXT: vmovd %xmm1, %esi
1367 ; X86XOP-NEXT: shrdl $16, %esi, %ebx
1368 ; X86XOP-NEXT: vpextrd $1, %xmm1, %edx
1369 ; X86XOP-NEXT: shrdl $16, %edx, %esi
1370 ; X86XOP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
1371 ; X86XOP-NEXT: vpperm %xmm0, %xmm1, %xmm0, %xmm0
1372 ; X86XOP-NEXT: vmovd %xmm0, %ecx
1373 ; X86XOP-NEXT: shrdl $16, %ecx, %edx
1374 ; X86XOP-NEXT: vpextrd $1, %xmm0, %edi
1375 ; X86XOP-NEXT: shrdl $16, %edi, %ecx
1376 ; X86XOP-NEXT: movl {{[0-9]+}}(%esp), %eax
1377 ; X86XOP-NEXT: movl %ecx, 60(%eax)
1378 ; X86XOP-NEXT: movl %edx, 56(%eax)
1379 ; X86XOP-NEXT: movl %esi, 52(%eax)
1380 ; X86XOP-NEXT: movl %ebx, 48(%eax)
1381 ; X86XOP-NEXT: movl %ebp, 44(%eax)
1382 ; X86XOP-NEXT: movl (%esp), %ecx # 4-byte Reload
1383 ; X86XOP-NEXT: movl %ecx, 40(%eax)
1384 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1385 ; X86XOP-NEXT: movl %ecx, 36(%eax)
1386 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1387 ; X86XOP-NEXT: movl %ecx, 32(%eax)
1388 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1389 ; X86XOP-NEXT: movl %ecx, 28(%eax)
1390 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1391 ; X86XOP-NEXT: movl %ecx, 24(%eax)
1392 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1393 ; X86XOP-NEXT: movl %ecx, 20(%eax)
1394 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1395 ; X86XOP-NEXT: movl %ecx, 16(%eax)
1396 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1397 ; X86XOP-NEXT: movl %ecx, 12(%eax)
1398 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1399 ; X86XOP-NEXT: movl %ecx, 8(%eax)
1400 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1401 ; X86XOP-NEXT: movl %ecx, 4(%eax)
1402 ; X86XOP-NEXT: movl {{[-0-9]+}}(%e{{[sb]}}p), %ecx # 4-byte Reload
1403 ; X86XOP-NEXT: movl %ecx, (%eax)
1404 ; X86XOP-NEXT: shrl $16, %edi
1405 ; X86XOP-NEXT: movw %di, 64(%eax)
1406 ; X86XOP-NEXT: addl $44, %esp
1407 ; X86XOP-NEXT: popl %esi
1408 ; X86XOP-NEXT: popl %edi
1409 ; X86XOP-NEXT: popl %ebx
1410 ; X86XOP-NEXT: popl %ebp
1411 ; X86XOP-NEXT: retl $4
1413 ; GFNI-LABEL: large_promotion:
1415 ; GFNI-NEXT: pushq %r15
1416 ; GFNI-NEXT: pushq %r14
1417 ; GFNI-NEXT: pushq %r13
1418 ; GFNI-NEXT: pushq %r12
1419 ; GFNI-NEXT: pushq %rbx
1420 ; GFNI-NEXT: movq %rdi, %rax
1421 ; GFNI-NEXT: movq {{[0-9]+}}(%rsp), %r12
1422 ; GFNI-NEXT: movq {{[0-9]+}}(%rsp), %r15
1423 ; GFNI-NEXT: movq {{[0-9]+}}(%rsp), %rbx
1424 ; GFNI-NEXT: movq {{[0-9]+}}(%rsp), %rdi
1425 ; GFNI-NEXT: bswapq %rdi
1426 ; GFNI-NEXT: movq %rdi, %r10
1427 ; GFNI-NEXT: shrq $4, %r10
1428 ; GFNI-NEXT: movabsq $1085102592571150095, %r11 # imm = 0xF0F0F0F0F0F0F0F
1429 ; GFNI-NEXT: andq %r11, %r10
1430 ; GFNI-NEXT: andq %r11, %rdi
1431 ; GFNI-NEXT: shlq $4, %rdi
1432 ; GFNI-NEXT: orq %r10, %rdi
1433 ; GFNI-NEXT: movabsq $3689348814741910323, %r10 # imm = 0x3333333333333333
1434 ; GFNI-NEXT: movq %rdi, %r14
1435 ; GFNI-NEXT: andq %r10, %r14
1436 ; GFNI-NEXT: shrq $2, %rdi
1437 ; GFNI-NEXT: andq %r10, %rdi
1438 ; GFNI-NEXT: leaq (%rdi,%r14,4), %rdi
1439 ; GFNI-NEXT: movabsq $6148820866244280320, %r14 # imm = 0x5555000000000000
1440 ; GFNI-NEXT: movq %rdi, %r13
1441 ; GFNI-NEXT: andq %r14, %r13
1442 ; GFNI-NEXT: shrq %rdi
1443 ; GFNI-NEXT: andq %r14, %rdi
1444 ; GFNI-NEXT: leaq (%rdi,%r13,2), %rdi
1445 ; GFNI-NEXT: bswapq %rbx
1446 ; GFNI-NEXT: movq %rbx, %r14
1447 ; GFNI-NEXT: shrq $4, %r14
1448 ; GFNI-NEXT: andq %r11, %r14
1449 ; GFNI-NEXT: andq %r11, %rbx
1450 ; GFNI-NEXT: shlq $4, %rbx
1451 ; GFNI-NEXT: orq %r14, %rbx
1452 ; GFNI-NEXT: movq %rbx, %r14
1453 ; GFNI-NEXT: andq %r10, %r14
1454 ; GFNI-NEXT: shrq $2, %rbx
1455 ; GFNI-NEXT: andq %r10, %rbx
1456 ; GFNI-NEXT: leaq (%rbx,%r14,4), %rbx
1457 ; GFNI-NEXT: movabsq $6148914691236517205, %r14 # imm = 0x5555555555555555
1458 ; GFNI-NEXT: movq %rbx, %r13
1459 ; GFNI-NEXT: andq %r14, %r13
1460 ; GFNI-NEXT: shrq %rbx
1461 ; GFNI-NEXT: andq %r14, %rbx
1462 ; GFNI-NEXT: leaq (%rbx,%r13,2), %rbx
1463 ; GFNI-NEXT: shrdq $48, %rbx, %rdi
1464 ; GFNI-NEXT: bswapq %r15
1465 ; GFNI-NEXT: movq %r15, %r13
1466 ; GFNI-NEXT: shrq $4, %r13
1467 ; GFNI-NEXT: andq %r11, %r13
1468 ; GFNI-NEXT: andq %r11, %r15
1469 ; GFNI-NEXT: shlq $4, %r15
1470 ; GFNI-NEXT: orq %r13, %r15
1471 ; GFNI-NEXT: movq %r15, %r13
1472 ; GFNI-NEXT: andq %r10, %r13
1473 ; GFNI-NEXT: shrq $2, %r15
1474 ; GFNI-NEXT: andq %r10, %r15
1475 ; GFNI-NEXT: leaq (%r15,%r13,4), %r15
1476 ; GFNI-NEXT: movq %r15, %r13
1477 ; GFNI-NEXT: andq %r14, %r13
1478 ; GFNI-NEXT: shrq %r15
1479 ; GFNI-NEXT: andq %r14, %r15
1480 ; GFNI-NEXT: leaq (%r15,%r13,2), %r15
1481 ; GFNI-NEXT: shrdq $48, %r15, %rbx
1482 ; GFNI-NEXT: bswapq %r12
1483 ; GFNI-NEXT: movq %r12, %r13
1484 ; GFNI-NEXT: shrq $4, %r13
1485 ; GFNI-NEXT: andq %r11, %r13
1486 ; GFNI-NEXT: andq %r11, %r12
1487 ; GFNI-NEXT: shlq $4, %r12
1488 ; GFNI-NEXT: orq %r13, %r12
1489 ; GFNI-NEXT: movq %r12, %r13
1490 ; GFNI-NEXT: andq %r10, %r13
1491 ; GFNI-NEXT: shrq $2, %r12
1492 ; GFNI-NEXT: andq %r10, %r12
1493 ; GFNI-NEXT: leaq (%r12,%r13,4), %r12
1494 ; GFNI-NEXT: movq %r12, %r13
1495 ; GFNI-NEXT: andq %r14, %r13
1496 ; GFNI-NEXT: shrq %r12
1497 ; GFNI-NEXT: andq %r14, %r12
1498 ; GFNI-NEXT: leaq (%r12,%r13,2), %r12
1499 ; GFNI-NEXT: shrdq $48, %r12, %r15
1500 ; GFNI-NEXT: bswapq %r9
1501 ; GFNI-NEXT: movq %r9, %r13
1502 ; GFNI-NEXT: shrq $4, %r13
1503 ; GFNI-NEXT: andq %r11, %r13
1504 ; GFNI-NEXT: andq %r11, %r9
1505 ; GFNI-NEXT: shlq $4, %r9
1506 ; GFNI-NEXT: orq %r13, %r9
1507 ; GFNI-NEXT: movq %r9, %r13
1508 ; GFNI-NEXT: andq %r10, %r13
1509 ; GFNI-NEXT: shrq $2, %r9
1510 ; GFNI-NEXT: andq %r10, %r9
1511 ; GFNI-NEXT: leaq (%r9,%r13,4), %r9
1512 ; GFNI-NEXT: movq %r9, %r13
1513 ; GFNI-NEXT: andq %r14, %r13
1514 ; GFNI-NEXT: shrq %r9
1515 ; GFNI-NEXT: andq %r14, %r9
1516 ; GFNI-NEXT: leaq (%r9,%r13,2), %r9
1517 ; GFNI-NEXT: shrdq $48, %r9, %r12
1518 ; GFNI-NEXT: bswapq %r8
1519 ; GFNI-NEXT: movq %r8, %r13
1520 ; GFNI-NEXT: shrq $4, %r13
1521 ; GFNI-NEXT: andq %r11, %r13
1522 ; GFNI-NEXT: andq %r11, %r8
1523 ; GFNI-NEXT: shlq $4, %r8
1524 ; GFNI-NEXT: orq %r13, %r8
1525 ; GFNI-NEXT: movq %r8, %r13
1526 ; GFNI-NEXT: andq %r10, %r13
1527 ; GFNI-NEXT: shrq $2, %r8
1528 ; GFNI-NEXT: andq %r10, %r8
1529 ; GFNI-NEXT: leaq (%r8,%r13,4), %r8
1530 ; GFNI-NEXT: movq %r8, %r13
1531 ; GFNI-NEXT: andq %r14, %r13
1532 ; GFNI-NEXT: shrq %r8
1533 ; GFNI-NEXT: andq %r14, %r8
1534 ; GFNI-NEXT: leaq (%r8,%r13,2), %r8
1535 ; GFNI-NEXT: shrdq $48, %r8, %r9
1536 ; GFNI-NEXT: bswapq %rcx
1537 ; GFNI-NEXT: movq %rcx, %r13
1538 ; GFNI-NEXT: shrq $4, %r13
1539 ; GFNI-NEXT: andq %r11, %r13
1540 ; GFNI-NEXT: andq %r11, %rcx
1541 ; GFNI-NEXT: shlq $4, %rcx
1542 ; GFNI-NEXT: orq %r13, %rcx
1543 ; GFNI-NEXT: movq %rcx, %r13
1544 ; GFNI-NEXT: andq %r10, %r13
1545 ; GFNI-NEXT: shrq $2, %rcx
1546 ; GFNI-NEXT: andq %r10, %rcx
1547 ; GFNI-NEXT: leaq (%rcx,%r13,4), %rcx
1548 ; GFNI-NEXT: movq %rcx, %r13
1549 ; GFNI-NEXT: andq %r14, %r13
1550 ; GFNI-NEXT: shrq %rcx
1551 ; GFNI-NEXT: andq %r14, %rcx
1552 ; GFNI-NEXT: leaq (%rcx,%r13,2), %rcx
1553 ; GFNI-NEXT: shrdq $48, %rcx, %r8
1554 ; GFNI-NEXT: bswapq %rdx
1555 ; GFNI-NEXT: movq %rdx, %r13
1556 ; GFNI-NEXT: shrq $4, %r13
1557 ; GFNI-NEXT: andq %r11, %r13
1558 ; GFNI-NEXT: andq %r11, %rdx
1559 ; GFNI-NEXT: shlq $4, %rdx
1560 ; GFNI-NEXT: orq %r13, %rdx
1561 ; GFNI-NEXT: movq %rdx, %r13
1562 ; GFNI-NEXT: andq %r10, %r13
1563 ; GFNI-NEXT: shrq $2, %rdx
1564 ; GFNI-NEXT: andq %r10, %rdx
1565 ; GFNI-NEXT: leaq (%rdx,%r13,4), %rdx
1566 ; GFNI-NEXT: movq %rdx, %r13
1567 ; GFNI-NEXT: andq %r14, %r13
1568 ; GFNI-NEXT: shrq %rdx
1569 ; GFNI-NEXT: andq %r14, %rdx
1570 ; GFNI-NEXT: leaq (%rdx,%r13,2), %rdx
1571 ; GFNI-NEXT: shrdq $48, %rdx, %rcx
1572 ; GFNI-NEXT: bswapq %rsi
1573 ; GFNI-NEXT: movq %rsi, %r13
1574 ; GFNI-NEXT: shrq $4, %r13
1575 ; GFNI-NEXT: andq %r11, %r13
1576 ; GFNI-NEXT: andq %r11, %rsi
1577 ; GFNI-NEXT: shlq $4, %rsi
1578 ; GFNI-NEXT: orq %r13, %rsi
1579 ; GFNI-NEXT: movq %rsi, %r11
1580 ; GFNI-NEXT: andq %r10, %r11
1581 ; GFNI-NEXT: shrq $2, %rsi
1582 ; GFNI-NEXT: andq %r10, %rsi
1583 ; GFNI-NEXT: leaq (%rsi,%r11,4), %rsi
1584 ; GFNI-NEXT: movq %rsi, %r10
1585 ; GFNI-NEXT: andq %r14, %r10
1586 ; GFNI-NEXT: shrq %rsi
1587 ; GFNI-NEXT: andq %r14, %rsi
1588 ; GFNI-NEXT: leaq (%rsi,%r10,2), %rsi
1589 ; GFNI-NEXT: shrdq $48, %rsi, %rdx
1590 ; GFNI-NEXT: shrq $48, %rsi
1591 ; GFNI-NEXT: movq %rdx, 56(%rax)
1592 ; GFNI-NEXT: movq %rcx, 48(%rax)
1593 ; GFNI-NEXT: movq %r8, 40(%rax)
1594 ; GFNI-NEXT: movq %r9, 32(%rax)
1595 ; GFNI-NEXT: movq %r12, 24(%rax)
1596 ; GFNI-NEXT: movq %r15, 16(%rax)
1597 ; GFNI-NEXT: movq %rbx, 8(%rax)
1598 ; GFNI-NEXT: movq %rdi, (%rax)
1599 ; GFNI-NEXT: movw %si, 64(%rax)
1600 ; GFNI-NEXT: popq %rbx
1601 ; GFNI-NEXT: popq %r12
1602 ; GFNI-NEXT: popq %r13
1603 ; GFNI-NEXT: popq %r14
1604 ; GFNI-NEXT: popq %r15
1606 %Z = call i528 @llvm.bitreverse.i528(i528 %A)
; Declaration of the scalar i528 bitreverse intrinsic invoked by the call on
; the preceding line; the CHECK lines above pin the legalized/expanded
; codegen for this wide (528-bit) bitreverse on each RUN configuration.
1609 declare i528 @llvm.bitreverse.i528(i528)